Really-amin committed on
Commit
2615875
·
verified ·
1 Parent(s): 62ccadb

Update app/api/dashboard.py

Browse files
Files changed (1) hide show
  1. app/api/dashboard.py +221 -132
app/api/dashboard.py CHANGED
@@ -1,193 +1,282 @@
1
- from fastapi import APIRouter, Depends
2
- from typing import Dict, Any
3
  import logging
4
- from datetime import datetime
5
  import os
6
  import psutil
7
- import sys
8
- import platform
9
  from pathlib import Path
 
10
  import json
11
- import asyncio
 
12
 
13
- # Redis Client
 
 
 
 
 
 
14
  try:
15
- import redis
 
16
  REDIS_AVAILABLE = True
17
- except ImportError:
 
18
  REDIS_AVAILABLE = False
 
19
 
20
- logger = logging.getLogger(__name__)
21
- router = APIRouter()
22
-
23
- # ----------------------------
24
- # Redis Connection
25
- # ----------------------------
26
- REDIS_HOST = os.getenv("REDIS_HOST", "localhost")
27
- REDIS_PORT = int(os.getenv("REDIS_PORT", 6379))
28
- REDIS_CACHE_TTL = 30 # seconds
29
 
30
- redis_client = None
31
- if REDIS_AVAILABLE:
32
- try:
33
- redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True)
34
- redis_client.ping()
35
- logger.info(" Connected to Redis")
36
- except Exception as e:
37
- redis_client = None
38
- logger.warning(f"⚠️ Redis unavailable: {e}")
39
 
40
- # ----------------------------
41
- # In-memory fallback cache
42
- # ----------------------------
43
- LOCAL_CACHE = {}
 
 
44
 
45
- def set_cache(key: str, value: Any, ttl: int = REDIS_CACHE_TTL):
 
 
 
46
  try:
47
- if redis_client:
48
  redis_client.setex(key, ttl, json.dumps(value))
49
  else:
50
- LOCAL_CACHE[key] = {"data": value, "expire": datetime.now().timestamp() + ttl}
51
- except Exception:
52
- LOCAL_CACHE[key] = {"data": value, "expire": datetime.now().timestamp() + ttl}
53
 
54
- def get_cache(key: str):
55
  try:
56
- if redis_client:
57
  data = redis_client.get(key)
58
  return json.loads(data) if data else None
59
  else:
60
- cached = LOCAL_CACHE.get(key)
61
- if cached and cached["expire"] > datetime.now().timestamp():
62
- return cached["data"]
63
- return None
64
  except Exception:
65
  return None
 
66
 
67
- # ----------------------------
68
- # Simulated database query
69
- # ----------------------------
70
- async def query_database_stats():
71
- """
72
- TODO: Replace with actual DB query logic
73
- """
74
- await asyncio.sleep(0.1) # simulate async query
75
- return {
76
- "total_documents": 200,
77
- "processed_today": 35,
78
- "active_reports": 10
79
- }
80
-
81
- # ----------------------------
82
- # Endpoints
83
- # ----------------------------
 
 
 
84
 
85
  @router.get("/stats")
86
  async def get_dashboard_stats():
87
- cache_key = "dashboard_stats"
88
- cached = get_cache(cache_key)
89
- if cached:
90
- return {"success": True, "data": cached}
91
-
92
  try:
93
- stats = await query_database_stats()
 
 
 
 
 
 
 
 
 
 
94
 
95
- # Uptime
96
  try:
97
  uptime_seconds = psutil.boot_time()
98
  uptime = datetime.now() - datetime.fromtimestamp(uptime_seconds)
99
- stats["uptime"] = f"{uptime.days}d {uptime.seconds // 3600}h"
100
  except Exception:
101
  stats["uptime"] = "N/A"
102
 
103
- # Storage
104
- disk_usage = psutil.disk_usage('/')
105
- stats["storage_used"] = f"{disk_usage.used // (1024**3)}GB"
106
- stats["storage_total"] = f"{disk_usage.total // (1024**3)}GB"
107
- stats["storage_percent"] = round((disk_usage.used / disk_usage.total) * 100, 1)
 
 
 
 
108
 
109
- # Memory
110
- memory = psutil.virtual_memory()
111
- stats["memory_percent"] = round(memory.percent, 1)
112
- stats["memory_used"] = f"{memory.used // (1024**2)}MB"
113
- stats["memory_total"] = f"{memory.total // (1024**2)}MB"
 
 
 
 
114
 
115
- set_cache(cache_key, stats)
116
  return {"success": True, "data": stats}
117
  except Exception as e:
118
- logger.error(f"Error fetching stats: {e}")
119
- return {"success": False, "error": str(e)}
120
-
121
 
122
  @router.get("/recent-activity")
123
  async def get_recent_activity(limit: int = 10):
124
- cache_key = f"recent_activity_{limit}"
125
- cached = get_cache(cache_key)
126
- if cached:
127
- return {"success": True, "activities": cached, "total": len(cached)}
128
-
129
- # TODO: Replace with DB query
130
- activities = [
131
- {"id": 1, "type": "upload", "title": "فایل جدید آپلود شد", "description": "document_contract_2024.pdf", "timestamp": "۲ ساعت پیش"},
132
- {"id": 2, "type": "process", "title": "پردازش OCR تکمیل شد", "description": "استخراج متن از ۳ سند", "timestamp": "۴ ساعت پیش"},
133
- ][:limit]
134
-
135
- set_cache(cache_key, activities)
136
- return {"success": True, "activities": activities, "total": len(activities)}
137
-
138
 
139
  @router.get("/system-info")
140
  async def get_system_info():
141
- cache_key = "system_info"
142
- cached = get_cache(cache_key)
143
- if cached:
144
- return {"success": True, "system_info": cached}
145
-
146
  try:
147
- info = {
148
- "platform": platform.platform(),
149
- "cpu_count": psutil.cpu_count(),
150
- "cpu_percent": psutil.cpu_percent(interval=1),
151
- "app_version": "2.1.0",
152
- "python_version": f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}",
153
- }
 
 
 
 
154
 
155
- dirs = {}
156
- for name in ["data", "logs", "cache"]:
157
- path = Path(f"/app/{name}")
158
- dirs[name] = {
159
- "exists": path.exists(),
160
- "size": get_directory_size(path) if path.exists() else 0
 
 
161
  }
162
- info["directories"] = dirs
 
 
163
 
164
- set_cache(cache_key, info)
165
- return {"success": True, "system_info": info}
166
  except Exception as e:
 
167
  return {"success": False, "error": str(e)}
168
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
169
 
170
  @router.get("/performance-metrics")
171
  async def get_performance_metrics():
172
  try:
173
- metrics = {
174
- "cpu": {"percent": psutil.cpu_percent(interval=1), "count": psutil.cpu_count()},
175
- "memory": dict(psutil.virtual_memory()._asdict()),
176
- "disk": dict(psutil.disk_usage('/')._asdict()),
177
- "network": dict(psutil.net_io_counters()._asdict())
178
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
179
  return {"success": True, "metrics": metrics, "timestamp": datetime.now().isoformat()}
180
  except Exception as e:
 
181
  return {"success": False, "error": str(e)}
182
 
 
 
 
 
 
 
 
 
183
 
184
- def get_directory_size(path: Path) -> int:
185
- total_size = 0
186
- for dirpath, _, filenames in os.walk(path):
187
- for filename in filenames:
188
- filepath = os.path.join(dirpath, filename)
189
- try:
190
- total_size += os.path.getsize(filepath)
191
- except (OSError, FileNotFoundError):
192
- pass
193
- return total_size
 
 
 
 
 
1
+ from fastapi import APIRouter, HTTPException, Depends
2
+ from typing import List, Dict, Any, Optional
3
  import logging
4
+ from datetime import datetime, timedelta
5
  import os
6
  import psutil
 
 
7
  from pathlib import Path
8
+ import redis
9
  import json
10
+ import platform
11
+ import sys
12
 
13
logger = logging.getLogger(__name__)

router = APIRouter()

# ---------------------------
# Redis connection
# ---------------------------
# Connection parameters are environment-configurable so deployments can point
# at a non-local Redis without code changes (defaults preserve old behavior).
REDIS_HOST = os.getenv("REDIS_HOST", "localhost")
REDIS_PORT = int(os.getenv("REDIS_PORT", "6379"))
REDIS_DB = int(os.getenv("REDIS_DB", "0"))

try:
    redis_client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
    # ping() raises when the server is unreachable, so a dead Redis is
    # detected eagerly at import time rather than on the first request.
    redis_client.ping()
    REDIS_AVAILABLE = True
    logger.info("✅ Redis connection established successfully.")
except Exception:
    REDIS_AVAILABLE = False
    logger.warning("⚠️ Redis connection failed. Using in-memory fallback cache.")

# In-memory cache fallback used when Redis is unreachable.
# Maps key -> {"data": value, "expire": datetime}; see cache_set/cache_get.
memory_cache = {}
 
 
 
 
 
 
 
31
 
32
# ---------------------------
# Mock data
# ---------------------------
# Placeholder figures served until real database queries are wired in.
MOCK_STATS = {
    "total_documents": 145,
    "processed_today": 23,
    "active_reports": 8,
    "uptime": "99.8%"
}

# Sample activity-feed entries; titles/descriptions/timestamps are
# Persian UI strings and must be emitted verbatim to the client.
MOCK_ACTIVITIES = [
    {"id": 1, "type": "upload", "title": "فایل جدید آپلود شد", "description": "document_contract_2024.pdf", "timestamp": "۲ ساعت پیش"},
    {"id": 2, "type": "process", "title": "پردازش OCR تکمیل شد", "description": "استخراج متن از ۳ سند", "timestamp": "۴ ساعت پیش"},
    {"id": 3, "type": "search", "title": "جستجو انجام شد", "description": "جستجو برای 'قرارداد خرید'", "timestamp": "۶ ساعت پیش"},
    {"id": 4, "type": "export", "title": "گزارش صادر شد", "description": "گزارش آماری ماهانه", "timestamp": "۱ روز پیش"}
]
48
 
49
+ # ---------------------------
50
+ # Cache helper functions
51
+ # ---------------------------
52
def cache_set(key: str, value: dict, ttl: int = 30):
    """Store *value* under *key* with a TTL (seconds), in Redis when available.

    Degrades to the in-process ``memory_cache`` when Redis is unavailable or
    the write fails, so callers always get best-effort caching.
    """
    try:
        if REDIS_AVAILABLE:
            redis_client.setex(key, ttl, json.dumps(value))
        else:
            memory_cache[key] = {"data": value, "expire": datetime.now() + timedelta(seconds=ttl)}
    except Exception as e:
        logger.error(f"Cache set failed: {e}")
        # Defect fix: a failed Redis write previously dropped the value
        # entirely; fall back to the in-memory cache instead.
        memory_cache[key] = {"data": value, "expire": datetime.now() + timedelta(seconds=ttl)}
60
 
61
def cache_get(key: str):
    """Return the cached value for *key*, or None on miss, expiry, or error.

    Reads Redis when available, otherwise the in-process fallback. Expired
    fallback entries are evicted so ``memory_cache`` cannot grow without bound.
    """
    try:
        if REDIS_AVAILABLE:
            data = redis_client.get(key)
            return json.loads(data) if data else None
        entry = memory_cache.get(key)
        if entry is not None:
            if datetime.now() < entry["expire"]:
                return entry["data"]
            # Defect fix: expired entries previously lingered forever.
            memory_cache.pop(key, None)
    except Exception:
        return None
    return None
72
 
73
+ # ---------------------------
74
+ # Helper function
75
+ # ---------------------------
76
def get_directory_size(path: Path) -> int:
    """Recursively sum the sizes, in bytes, of every readable file under *path*.

    Files that vanish or are unreadable between listing and stat are skipped;
    any unexpected failure makes the whole probe report 0 rather than raise.
    """
    total = 0
    try:
        for dirpath, _, filenames in os.walk(path):
            for fname in filenames:
                full_path = os.path.join(dirpath, fname)
                try:
                    total += os.path.getsize(full_path)
                except OSError:
                    # FileNotFoundError is an OSError subclass; skip and go on.
                    continue
    except Exception:
        return 0
    return total
89
+
90
+ # ---------------------------
91
+ # API Endpoints
92
+ # ---------------------------
93
 
94
@router.get("/stats")
async def get_dashboard_stats():
    """Return dashboard statistics: document counters, uptime, storage, memory.

    Results are cached under ``dashboard_stats``. Each system probe degrades
    independently to a placeholder value so one failing source cannot take the
    whole endpoint down.
    """
    try:
        cache_key = "dashboard_stats"
        cached_data = cache_get(cache_key)
        if cached_data:
            return {"success": True, "data": cached_data}

        # Base counters. TODO: replace MOCK_STATS with a real DB query.
        # (Defect fix: the previous try/except here was dead code — both
        # branches performed the identical stats.update(MOCK_STATS).)
        stats = dict(MOCK_STATS)

        # Host uptime derived from the boot timestamp.
        try:
            boot_ts = psutil.boot_time()
            uptime = datetime.now() - datetime.fromtimestamp(boot_ts)
            stats["uptime"] = f"{uptime.days}d {uptime.seconds // 3600}h"
        except Exception:
            stats["uptime"] = "N/A"

        # Root filesystem usage.
        try:
            disk_usage = psutil.disk_usage('/')
            stats["storage_used"] = f"{disk_usage.used // (1024**3)}GB"
            stats["storage_total"] = f"{disk_usage.total // (1024**3)}GB"
            stats["storage_percent"] = round((disk_usage.used / disk_usage.total) * 100, 1)
        except Exception:
            stats["storage_used"] = "N/A"
            stats["storage_total"] = "N/A"
            stats["storage_percent"] = 0

        # Virtual-memory usage.
        try:
            memory = psutil.virtual_memory()
            stats["memory_percent"] = round(memory.percent, 1)
            stats["memory_used"] = f"{memory.used // (1024**2)}MB"
            stats["memory_total"] = f"{memory.total // (1024**2)}MB"
        except Exception:
            stats["memory_percent"] = 0
            stats["memory_used"] = "N/A"
            stats["memory_total"] = "N/A"

        cache_set(cache_key, stats)
        return {"success": True, "data": stats}
    except Exception as e:
        logger.error(f"Error in /stats: {e}")
        return {"success": False, "error": str(e), "data": MOCK_STATS}
 
141
 
142
@router.get("/recent-activity")
async def get_recent_activity(limit: int = 10):
    """Return up to *limit* most recent activity entries.

    TODO: backed by MOCK_ACTIVITIES until a real activity log exists.
    """
    try:
        # Clamp so a negative query parameter cannot produce a confusing
        # negative-index slice; limit=0 legitimately yields an empty list.
        # Also compute the slice once instead of twice.
        activities = MOCK_ACTIVITIES[:max(0, limit)]
        return {"success": True, "activities": activities, "total": len(activities)}
    except Exception as e:
        logger.error(f"Error in /recent-activity: {e}")
        return {"success": False, "error": str(e), "activities": []}
 
 
 
 
 
 
 
 
 
149
 
150
@router.get("/system-info")
async def get_system_info():
    """Return platform/CPU details, app version, and app directory sizes."""
    try:
        system_info = {}
        try:
            system_info["platform"] = platform.platform()
            system_info["cpu_count"] = psutil.cpu_count()
            # interval=1 blocks ~1s but yields a meaningful CPU sample.
            system_info["cpu_percent"] = psutil.cpu_percent(interval=1)
        except Exception as e:
            logger.warning(f"System info error: {e}")
            system_info.update({"platform": "Unknown", "cpu_count": "N/A", "cpu_percent": 0})

        system_info["app_version"] = "2.0.0"
        system_info["python_version"] = sys.version

        try:
            # One loop instead of three copy-pasted per-directory stanzas.
            directories = {}
            for name in ("data", "logs", "cache"):
                dir_path = Path(f"/app/{name}")
                exists = dir_path.exists()
                directories[name] = {
                    "exists": exists,
                    "size": get_directory_size(dir_path) if exists else 0,
                }
            system_info["directories"] = directories
        except Exception as e:
            logger.warning(f"Directory info error: {e}")
            system_info["directories"] = {}

        return {"success": True, "system_info": system_info}
    except Exception as e:
        logger.error(f"Error in /system-info: {e}")
        return {"success": False, "error": str(e)}
182
 
183
@router.get("/health-check")
async def dashboard_health_check():
    """Run database/filesystem/memory/disk health probes.

    Returns an ``overall`` status of "healthy" or "degraded" plus a per-check
    breakdown; any probe failure degrades that check without aborting others.
    """
    try:
        health_status = {"overall": "healthy", "checks": {}}
        # Database check. NOTE(review): the try body only assigns a literal
        # dict, so the except arm is currently unreachable — presumably a
        # placeholder for a real connection test. Confirm before relying on it.
        try:
            health_status["checks"]["database"] = {"status": "healthy", "message": "Database connection OK"}
        except Exception as e:
            health_status["checks"]["database"] = {"status": "unhealthy", "message": str(e)}
            health_status["overall"] = "degraded"

        # Filesystem check: prove /app/data is writable via a create/delete
        # round-trip of a temp file.
        try:
            data_dir = Path("/app/data")
            if not data_dir.exists():
                data_dir.mkdir(parents=True, exist_ok=True)
            test_file = data_dir / "health_check.tmp"
            test_file.write_text("test")
            test_file.unlink()
            health_status["checks"]["filesystem"] = {"status": "healthy", "message": "File system OK"}
        except Exception as e:
            health_status["checks"]["filesystem"] = {"status": "unhealthy", "message": str(e)}
            health_status["overall"] = "degraded"

        # Memory check: warn (and degrade overall) above 90% utilization.
        try:
            memory = psutil.virtual_memory()
            if memory.percent > 90:
                health_status["checks"]["memory"] = {"status": "warning", "message": f"High memory usage: {memory.percent}%"}
                health_status["overall"] = "degraded"
            else:
                health_status["checks"]["memory"] = {"status": "healthy", "message": f"Memory usage OK: {memory.percent}%"}
        except Exception as e:
            # Probe failure is reported as "unknown" and does NOT degrade overall.
            health_status["checks"]["memory"] = {"status": "unknown", "message": str(e)}

        # Disk check: warn (and degrade overall) above 90% utilization of '/'.
        try:
            disk = psutil.disk_usage('/')
            disk_percent = (disk.used / disk.total) * 100
            if disk_percent > 90:
                health_status["checks"]["disk"] = {"status": "warning", "message": f"High disk usage: {disk_percent:.1f}%"}
                health_status["overall"] = "degraded"
            else:
                health_status["checks"]["disk"] = {"status": "healthy", "message": f"Disk usage OK: {disk_percent:.1f}%"}
        except Exception as e:
            # Same as memory: probe failure is "unknown", not "degraded".
            health_status["checks"]["disk"] = {"status": "unknown", "message": str(e)}

        return {"success": True, "health": health_status, "timestamp": datetime.now().isoformat()}
    except Exception as e:
        logger.error(f"Error in health-check: {e}")
        return {"success": False, "error": str(e), "health": {"overall": "unhealthy", "checks": {}}}
230
 
231
@router.get("/performance-metrics")
async def get_performance_metrics():
    """Return point-in-time CPU, memory, disk, and network counters.

    Each probe is isolated: a failing psutil call degrades only its own
    section to zeros/None instead of failing the whole endpoint.
    """
    try:
        metrics = {}
        try:
            # cpu_freq() can return None on platforms without frequency info;
            # call it once instead of twice (check + _asdict).
            freq = psutil.cpu_freq()
            metrics["cpu"] = {
                "percent": psutil.cpu_percent(interval=1),
                "count": psutil.cpu_count(),
                "freq": freq._asdict() if freq else None,
            }
        except Exception as e:
            # Defect fix: the caught exception was previously bound but ignored.
            logger.warning(f"CPU metrics unavailable: {e}")
            metrics["cpu"] = {"percent": 0, "count": "N/A", "freq": None}
        try:
            memory = psutil.virtual_memory()
            metrics["memory"] = {"total": memory.total, "used": memory.used, "free": memory.free, "percent": memory.percent}
        except Exception:
            metrics["memory"] = {"total": 0, "used": 0, "free": 0, "percent": 0}
        try:
            disk = psutil.disk_usage('/')
            metrics["disk"] = {"total": disk.total, "used": disk.used, "free": disk.free, "percent": (disk.used / disk.total) * 100}
        except Exception:
            metrics["disk"] = {"total": 0, "used": 0, "free": 0, "percent": 0}
        try:
            network = psutil.net_io_counters()
            metrics["network"] = {"bytes_sent": network.bytes_sent, "bytes_recv": network.bytes_recv, "packets_sent": network.packets_sent, "packets_recv": network.packets_recv}
        except Exception:
            metrics["network"] = {"bytes_sent": 0, "bytes_recv": 0, "packets_sent": 0, "packets_recv": 0}

        return {"success": True, "metrics": metrics, "timestamp": datetime.now().isoformat()}
    except Exception as e:
        logger.error(f"Error in performance-metrics: {e}")
        return {"success": False, "error": str(e)}
259
 
260
@router.post("/log-activity")
async def log_activity(activity_data: dict):
    """Record a client-reported activity event in the application log."""
    try:
        logger.info(f"Activity logged: {activity_data}")
    except Exception as e:
        logger.error(f"Error logging activity: {e}")
        return {"success": False, "error": str(e)}
    return {"success": True, "message": "Activity logged successfully"}
268
 
269
@router.get("/export-dashboard-data")
async def export_dashboard_data():
    """Bundle every dashboard endpoint's payload into a single export response.

    Calls the sibling endpoint handlers directly, so each sub-payload carries
    its own success/error envelope.
    """
    try:
        stats = await get_dashboard_stats()
        activities = await get_recent_activity()
        system_info = await get_system_info()
        performance = await get_performance_metrics()
        export_data = {
            "timestamp": datetime.now().isoformat(),
            "stats": stats,
            "activities": activities,
            "system_info": system_info,
            "performance": performance,
        }
        return {"success": True, "data": export_data}
    except Exception as e:
        logger.error(f"Error exporting dashboard data: {e}")
        return {"success": False, "error": str(e)}