seawolf2357 committed on
Commit
a7afc5f
Β·
verified Β·
1 Parent(s): db06ad2

Upload app_routes.py

Browse files
Files changed (1) hide show
  1. app_routes.py +512 -0
app_routes.py ADDED
@@ -0,0 +1,512 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, Request
2
+ from fastapi.responses import StreamingResponse
3
+ import aiosqlite, asyncio, random, json, logging
4
+ from npc_core import (
5
+ ALL_TICKERS, TRADING_STRATEGIES, IDENTITY_STRATEGY_MAP,
6
+ NPCTargetPriceEngine, NPCElasticityEngine, GroqAIClient,)
7
+ from npc_trading import (
8
+ get_all_prices, get_ticker_positions, get_trading_leaderboard, get_trading_stats,
9
+ MarketDataFetcher, STOCK_TICKERS, CRYPTO_TICKERS,
10
+ record_profit_snapshots, backfill_profit_snapshots, get_hall_of_fame_data,
11
+ get_npc_trade_history, get_research_feed, get_research_detail,
12
+ purchase_research, get_research_stats, get_market_pulse,)
13
+ from npc_intelligence import (
14
+ MarketIndexCollector, ScreeningEngine, NPCResearchEngine,
15
+ load_indices_from_db, save_indices_to_db, load_news_from_db,
16
+ save_analysis_to_db, load_analysis_from_db, load_all_analyses_from_db,)
17
+ from npc_memory_evolution import get_npc_evolution_stats, get_evolution_leaderboard
18
+ from npc_sec_enforcement import get_sec_dashboard
19
logger = logging.getLogger(__name__)
router = APIRouter()
# Module-level runtime configuration; empty placeholders until the host
# application calls configure() below.
_DB_PATH = ""     # path to the shared SQLite database
_GROQ_KEY = ""    # Groq API key used by the AI analysis endpoints
_EventBus = None  # event-bus class injected by the host application
24
def configure(db_path: str, groq_key: str, event_bus_cls):
    """Inject runtime settings shared by every route in this module.

    Must be called once by the host application before the router is used.
    """
    global _DB_PATH, _GROQ_KEY, _EventBus
    _DB_PATH = db_path
    _GROQ_KEY = groq_key
    _EventBus = event_bus_cls
27
@router.get("/api/trading/prices")
async def api_trading_prices():
    """Return current prices for all tracked tickers."""
    return await get_all_prices(_DB_PATH)
30
@router.get("/api/trading/ticker/{ticker}")
async def api_ticker_detail(ticker: str):
    """Return NPC position details for one ticker."""
    return await get_ticker_positions(_DB_PATH, ticker)
33
@router.get("/api/trading/chart/{ticker}")
async def api_ticker_chart(ticker: str, period: str = "1mo"):
    """Return chart data for *ticker*; unknown periods fall back to '1mo'."""
    valid_periods = ('1d', '5d', '1mo', '3mo', '6mo', '1y')
    if period not in valid_periods:
        period = '1mo'
    chart = MarketDataFetcher.fetch_chart_data(ticker, period)
    return {"ticker": ticker, "period": period, "data": chart}
37
@router.get("/api/trading/leaderboard")
async def api_trading_leaderboard(limit: int = 30):
    """Return the trading leaderboard, capped at 50 entries."""
    return await get_trading_leaderboard(_DB_PATH, min(limit, 50))
40
@router.get("/api/trading/stats")
async def api_trading_stats():
    """Return aggregate trading statistics."""
    return await get_trading_stats(_DB_PATH)
43
@router.get("/api/trading/tickers")
async def api_trading_tickers():
    """Return the configured stock and crypto ticker lists."""
    return {"stocks": STOCK_TICKERS, "crypto": CRYPTO_TICKERS}
46
@router.get("/api/strategies")
async def api_strategies():
    """List all trading strategies and the identity-to-strategy mapping."""
    strats = [
        {
            'key': key,
            'name': meta['name'],
            'category': meta['category'],
            'timeframe': meta['timeframe'],
            'signal': meta['signal'],
            'method': meta['method'],
            'entry': meta['entry'],
            'tip': meta.get('tip', ''),
        }
        for key, meta in TRADING_STRATEGIES.items()
    ]
    # dict() replaces the redundant identity comprehension; still returns a
    # copy so the module-level map cannot be mutated by callers.
    return {"strategies": strats, "identity_map": dict(IDENTITY_STRATEGY_MAP)}
50
@router.get("/api/hall-of-fame")
async def api_hall_of_fame(period: str = "3d"):
    """Return hall-of-fame rankings for the given period (default 3 days)."""
    return await get_hall_of_fame_data(_DB_PATH, period=period, limit=30)
53
@router.post("/api/hall-of-fame/snapshot")
async def api_force_snapshot():
    """Force an immediate profit snapshot; returns the number recorded."""
    return {"recorded": await record_profit_snapshots(_DB_PATH)}
56
@router.post("/api/hall-of-fame/backfill")
async def api_force_backfill():
    """Force a historical profit-snapshot backfill; returns the count filled."""
    return {"backfilled": await backfill_profit_snapshots(_DB_PATH, force=True)}
59
@router.get("/api/hall-of-fame/history/{agent_id}")
async def api_npc_trade_history(agent_id: str):
    """Return the trade history for one NPC agent."""
    return await get_npc_trade_history(_DB_PATH, agent_id)
62
@router.get("/api/intelligence/indices")
async def api_market_indices():
    """Serve market indices, refreshing the DB cache when it is empty."""
    cached = await load_indices_from_db(_DB_PATH)
    if cached:
        return {"indices": cached}
    # Cache miss: fetch live data and persist it for subsequent requests.
    fresh = MarketIndexCollector.fetch_indices()
    await save_indices_to_db(_DB_PATH, fresh)
    return {"indices": fresh}
68
@router.get("/api/intelligence/screening/{ticker}")
async def api_screening_data(ticker: str):
    """Return cached screening metrics for *ticker* from market_prices.

    Falls back to ``{"ticker", "error"}`` when the row is absent or the
    query fails (e.g. the table does not exist yet).
    """
    # Column order drives both the SELECT and the response keys.
    cols = ("ticker", "price", "change_pct", "volume", "market_cap", "rsi",
            "pe_ratio", "high_52w", "low_52w", "from_high", "from_low")
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        try:
            cursor = await db.execute(
                f"SELECT {', '.join(cols)} FROM market_prices WHERE ticker=?",
                (ticker,))
            row = await cursor.fetchone()
            if row:
                return dict(zip(cols, row))
        except Exception as e:
            # Was a bare `except: pass`; keep best-effort behavior but log it.
            logger.warning(f"Screening lookup failed for {ticker}: {e}")
    return {"ticker": ticker, "error": "no data"}
78
@router.get("/api/intelligence/target/{ticker}")
async def api_target_price(ticker: str):
    """Compute target price and elasticity for *ticker* from cached data.

    Returns ``{"ticker", "error"}`` when no cached row exists or the
    computation fails.
    """
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        try:
            cursor = await db.execute(
                "SELECT price, rsi, pe_ratio, from_high, from_low FROM market_prices WHERE ticker=?",
                (ticker,))
            row = await cursor.fetchone()
            if row:
                screening = {'price': row[0], 'rsi': row[1], 'pe_ratio': row[2],
                             'from_high': row[3], 'from_low': row[4]}
                # Unknown tickers degrade to an empty info dict / 'stock' type.
                ticker_info = next((t for t in ALL_TICKERS if t['ticker'] == ticker), {})
                asset_type = ticker_info.get('type', 'stock')
                target = NPCTargetPriceEngine.calculate_target(ticker, row[0], screening, asset_type)
                elasticity = NPCElasticityEngine.calculate(row[0], screening, target['target_price'], asset_type)
                return {**target, **elasticity, 'screening': screening}
        except Exception as e:
            # Was a bare `except: pass`; keep best-effort behavior but log it.
            logger.warning(f"Target price lookup failed for {ticker}: {e}")
    return {"ticker": ticker, "error": "no data"}
93
@router.get("/api/news/feed")
async def api_news_feed(ticker: str = None, limit: int = 50):
    """Return recent news items, each annotated with emoji reaction counts.

    Reaction enrichment is best-effort: if the reactions table is missing
    or the query fails, items are returned without counts rather than
    failing the request.
    """
    news = await load_news_from_db(_DB_PATH, ticker, limit)
    try:
        async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
            await db.execute("PRAGMA busy_timeout=30000")
            # NOTE: one query per item (N+1); acceptable at limit <= 50.
            for item in news:
                nid = item.get('id')
                if not nid:
                    item['reactions'] = {}
                    continue
                rc = await db.execute(
                    "SELECT emoji, COUNT(*) FROM news_reactions WHERE news_id=? GROUP BY emoji",
                    (nid,))
                item['reactions'] = {emoji: cnt for emoji, cnt in await rc.fetchall()}
    except Exception as e:
        # Was a bare `except: pass`; log so a broken reactions table is visible.
        logger.warning(f"News reaction enrichment failed: {e}")
    return {"news": news, "count": len(news)}
107
@router.post("/api/news/react")
async def api_news_react(request: Request):
    """Record one emoji reaction per (user, news item); returns new counts."""
    body = await request.json()
    email = body.get("email", "")
    news_id = body.get("news_id")
    emoji = body.get("emoji", "")
    allowed = ('👍', '🔥', '😱', '💀', '🚀', '😂')
    if not email or not news_id or emoji not in allowed:
        return {"error": "Invalid reaction"}
    try:
        async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
            await db.execute("PRAGMA busy_timeout=30000")
            # INSERT OR IGNORE enforces one reaction per user per item.
            await db.execute(
                "INSERT OR IGNORE INTO news_reactions (news_id, user_email, emoji) VALUES (?,?,?)",
                (news_id, email, emoji))
            await db.commit()
            rc = await db.execute(
                "SELECT emoji, COUNT(*) FROM news_reactions WHERE news_id=? GROUP BY emoji",
                (news_id,))
            counts = {emo: cnt for emo, cnt in await rc.fetchall()}
            return {"success": True, "reactions": counts}
    except Exception as e:
        return {"error": str(e)}
117
@router.get("/api/market/pulse")
async def api_market_pulse():
    """Aggregate market pulse plus cached indices; empty shapes on error."""
    try:
        pulse = await get_market_pulse(_DB_PATH)
        indices = await load_indices_from_db(_DB_PATH)
        pulse['indices'] = indices or []
        return pulse
    except Exception as e:
        logger.error(f"Market pulse error: {e}")
        return {"hot_movers": [], "indices": [], "activity": {}}
125
@router.get("/api/research/feed")
async def api_research_feed(ticker: str = None, limit: int = 30):
    """Return up to 50 research reports (optionally ticker-filtered) + stats."""
    capped = min(limit, 50)
    reports = await get_research_feed(_DB_PATH, ticker, capped)
    stats = await get_research_stats(_DB_PATH)
    return {"reports": reports, "stats": stats}
129
@router.get("/api/research/report/{report_id}")
async def api_research_detail(report_id: int):
    """Fetch a single research report by id."""
    report = await get_research_detail(_DB_PATH, report_id)
    if not report:
        return {"success": False, "error": "Report not found"}
    return {"success": True, "report": report}
134
@router.post("/api/research/purchase")
async def api_research_purchase(data: dict):
    """Let an NPC purchase a research report; both ids are required."""
    buyer_id = data.get('buyer_agent_id')
    report_id = data.get('report_id')
    if buyer_id and report_id:
        return await purchase_research(_DB_PATH, buyer_id, int(report_id))
    return {"success": False, "error": "Missing parameters"}
139
@router.get("/api/analysis/{ticker}")
async def api_deep_analysis(ticker: str):
    """Return a deep-dive analysis report, generating and caching on miss.

    Previously an unknown ticker raised StopIteration inside the try block
    (from ``next()`` without a default) and surfaced as an opaque error;
    it is now rejected explicitly before any expensive work.
    """
    report = await load_analysis_from_db(_DB_PATH, ticker)
    if report:
        return {"success": True, "report": report, "from_cache": True}
    t_info = next((t for t in ALL_TICKERS if t['ticker'] == ticker), None)
    if t_info is None:
        return {"success": False, "error": f"Unknown ticker: {ticker}"}
    try:
        screening_data = ScreeningEngine.fetch_extended_data([t_info])
        s = screening_data.get(ticker, {})
        research = NPCResearchEngine(GroqAIClient(_GROQ_KEY))
        report = await research.generate_deep_analysis(ticker, t_info.get('name', ticker), s)
        await save_analysis_to_db(_DB_PATH, report)
        return {"success": True, "report": report, "from_cache": False}
    except Exception as e:
        return {"success": False, "error": str(e)}
150
@router.get("/api/analysis/all/summary")
async def api_all_analyses():
    """Build a combined analysis summary for every ticker.

    Merges cached AI analyses with live prices, open NPC positioning, 24h
    trade performance, 12h news sentiment and community mentions.  Schema
    tolerant: optional columns are probed via PRAGMA before being selected,
    so older databases still work.
    """
    analyses = await load_all_analyses_from_db(_DB_PATH)
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        # Probe market_prices for optional columns before building the SELECT.
        await db.execute("PRAGMA busy_timeout=30000"); col_cursor = await db.execute("PRAGMA table_info(market_prices)")
        existing_cols = {r[1] for r in await col_cursor.fetchall()}
        has_rsi = 'rsi' in existing_cols; has_pe = 'pe_ratio' in existing_cols; has_52w = 'high_52w' in existing_cols; has_from = 'from_high' in existing_cols
        base_cols = "ticker, price, change_pct, prev_close, volume, high_24h, low_24h, market_cap"; extra_cols = ""
        if has_rsi: extra_cols += ", rsi"
        if has_pe: extra_cols += ", pe_ratio"
        if has_52w: extra_cols += ", high_52w, low_52w"
        if has_from: extra_cols += ", from_high"
        price_cursor = await db.execute(f"SELECT {base_cols}{extra_cols} FROM market_prices WHERE price > 0"); price_map = {}
        for r in await price_cursor.fetchall():
            # Defaults cover schemas lacking the optional columns.
            d = {'price': r[1], 'change_pct': round(r[2] or 0, 2), 'prev_close': r[3], 'volume': r[4], 'high': r[5], 'low': r[6], 'market_cap': r[7] or 0, 'rsi': 50, 'pe_ratio': 0, 'week52_high': 0, 'week52_low': 0, 'from_high': 0, 'sector': ''}
            # Optional columns follow the 8 base columns, in probe order;
            # idx must advance in the same order as extra_cols was built.
            idx = 8
            if has_rsi: d['rsi'] = round(r[idx] or 50, 1); idx += 1
            if has_pe: d['pe_ratio'] = round(r[idx] or 0, 1); idx += 1
            if has_52w: d['week52_high'] = r[idx] or 0; d['week52_low'] = r[idx+1] or 0; idx += 2
            if has_from: d['from_high'] = round(r[idx] or 0, 1); idx += 1
            price_map[r[0]] = d
        # Open NPC positions per ticker/direction (leverage column optional).
        pos_col_cursor = await db.execute("PRAGMA table_info(npc_positions)"); pos_cols = {r[1] for r in await pos_col_cursor.fetchall()}
        has_leverage = 'leverage' in pos_cols; has_liquidated = 'liquidated' in pos_cols; lev_expr = "AVG(COALESCE(leverage, 1))" if has_leverage else "1"
        pos_cursor = await db.execute(f"SELECT ticker, direction, COUNT(*) as cnt, SUM(gpu_bet) as total_bet, {lev_expr} as avg_leverage FROM npc_positions WHERE status='open' GROUP BY ticker, direction")
        pos_map = {}
        for r in await pos_cursor.fetchall():
            tk = r[0]
            if tk not in pos_map: pos_map[tk] = {'long_count': 0, 'short_count': 0, 'long_gpu': 0, 'short_gpu': 0, 'long_avg_lev': 1, 'short_avg_lev': 1}
            if r[1] == 'long': pos_map[tk]['long_count'] = r[2]; pos_map[tk]['long_gpu'] = round(r[3] or 0); pos_map[tk]['long_avg_lev'] = round(r[4] or 1, 1)
            else: pos_map[tk]['short_count'] = r[2]; pos_map[tk]['short_gpu'] = round(r[3] or 0); pos_map[tk]['short_avg_lev'] = round(r[4] or 1, 1)
        # Closed-trade performance over the last 24 hours, per ticker.
        liq_expr = "SUM(CASE WHEN liquidated=1 THEN 1 ELSE 0 END)" if has_liquidated else "0"
        status_filter = "status IN ('closed','liquidated')" if has_liquidated else "status='closed'"
        perf_cursor = await db.execute(f"SELECT ticker, COUNT(*) as trades, AVG(profit_pct) as avg_pnl, SUM(CASE WHEN profit_gpu > 0 THEN 1 ELSE 0 END) as wins, {liq_expr} as liquidations FROM npc_positions WHERE {status_filter} AND closed_at > datetime('now', '-24 hours') GROUP BY ticker")
        perf_map = {r[0]: {'recent_trades': r[1], 'avg_pnl_pct': round(r[2] or 0, 2), 'win_count': r[3] or 0, 'liquidations': r[4] or 0} for r in await perf_cursor.fetchall()}
        # 12h news sentiment per ticker (best-effort; table may not exist).
        try:
            news_cursor = await db.execute("SELECT ticker, sentiment, COUNT(*) as cnt FROM npc_news WHERE created_at > datetime('now', '-12 hours') GROUP BY ticker, sentiment")
            news_map = {}
            for r in await news_cursor.fetchall():
                if r[0] not in news_map: news_map[r[0]] = {'bullish': 0, 'bearish': 0, 'neutral': 0}
                news_map[r[0]][r[1]] = r[2]
        except: news_map = {}
        # Community mentions: scan the 100 most-liked posts of the last 6 hours.
        try:
            mention_cursor = await db.execute("SELECT title || ' ' || content FROM posts WHERE created_at > datetime('now', '-6 hours') ORDER BY likes_count DESC LIMIT 100")
            mention_texts = [r[0] for r in await mention_cursor.fetchall()]
        except: mention_texts = []
        mention_counts = {}
        for text in mention_texts:
            tl = text.lower()
            for t in ALL_TICKERS:
                # Crypto tickers match with or without the '-usd' suffix.
                tk_low = t['ticker'].lower().replace('-usd', '')
                if tk_low in tl or t['ticker'].lower() in tl: mention_counts[t['ticker']] = mention_counts.get(t['ticker'], 0) + 1
        # Merge everything; synthesize a target/rating when no cached analysis exists.
        ana_map = {a['ticker']: a for a in analyses}; result = []
        for t in ALL_TICKERS:
            tk = t['ticker']; px = price_map.get(tk, {})
            if not px or px.get('price', 0) <= 0: continue
            ana = ana_map.get(tk, {})
            pos = pos_map.get(tk, {'long_count': 0, 'short_count': 0, 'long_gpu': 0, 'short_gpu': 0, 'long_avg_lev': 1, 'short_avg_lev': 1})
            perf = perf_map.get(tk, {'recent_trades': 0, 'avg_pnl_pct': 0, 'win_count': 0, 'liquidations': 0})
            news = news_map.get(tk, {'bullish': 0, 'bearish': 0, 'neutral': 0})
            if not ana:
                target_data = NPCTargetPriceEngine.calculate_target(tk, px['price'], px, t['type'])
                elasticity = NPCElasticityEngine.calculate(px['price'], px, target_data['target_price'])
                ana = {'ticker': tk, 'company_name': t['name'], 'current_price': px['price'], 'target_price': target_data['target_price'], 'upside': target_data['upside'], 'rating': target_data['rating'], 'rating_class': target_data['rating_class'], **elasticity}
            # Percentages default to 50 (neutral) when there is no data at all.
            total_pos = pos['long_count'] + pos['short_count']; long_pct = round(pos['long_count'] / total_pos * 100) if total_pos > 0 else 50
            total_news = news['bullish'] + news['bearish'] + news['neutral']; news_bullish_pct = round(news['bullish'] / total_news * 100) if total_news > 0 else 50
            rsi = px.get('rsi', 50); rsi_status = 'oversold' if rsi < 30 else 'overbought' if rsi > 70 else 'neutral'
            result.append({**ana, 'current_price': px['price'], 'change_pct': px['change_pct'], 'volume': px.get('volume', 0), 'market_cap': px.get('market_cap', 0), 'type': t['type'], 'emoji': t['emoji'], 'rsi': rsi, 'rsi_status': rsi_status, 'pe_ratio': px.get('pe_ratio', 0), 'week52_high': px.get('week52_high', 0), 'week52_low': px.get('week52_low', 0), 'from_high': px.get('from_high', 0), 'sector': px.get('sector', ''), 'npc_long_count': pos['long_count'], 'npc_short_count': pos['short_count'], 'npc_long_gpu': pos['long_gpu'], 'npc_short_gpu': pos['short_gpu'], 'npc_long_pct': long_pct, 'npc_long_avg_lev': pos.get('long_avg_lev', 1), 'npc_short_avg_lev': pos.get('short_avg_lev', 1), 'recent_trades': perf['recent_trades'], 'recent_avg_pnl': perf['avg_pnl_pct'], 'recent_wins': perf['win_count'], 'recent_liquidations': perf['liquidations'], 'news_bullish': news['bullish'], 'news_bearish': news['bearish'], 'news_neutral': news['neutral'], 'news_bullish_pct': news_bullish_pct, 'community_mentions': mention_counts.get(tk, 0)})
        # Rank by buzz: community mentions weigh 3x open position counts.
        result.sort(key=lambda x: -(x.get('community_mentions', 0) * 3 + x.get('npc_long_count', 0) + x.get('npc_short_count', 0)))
        return {"analyses": result, "count": len(result), "total_tickers": len(ALL_TICKERS)}
219
@router.get("/api/npc/search")
async def api_npc_search(q: str = ""):
    """Substring search over active NPC usernames (max 10 results)."""
    if not q:
        return {"npcs": []}
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        cursor = await db.execute(
            "SELECT agent_id, username, ai_identity FROM npc_agents WHERE username LIKE ? AND is_active=1 LIMIT 10",
            (f'%{q}%',))
        rows = await cursor.fetchall()
        return {"npcs": [{"agent_id": aid, "username": name, "identity": ident}
                         for aid, name, ident in rows]}
225
@router.get("/api/npc/profile/{agent_id}")
async def api_npc_profile(agent_id: str):
    """Full trading profile for one NPC: identity, closed-trade stats,
    open positions with unrealized PnL, trade history, SEC violations and
    evolution state.

    Schema-tolerant: queries touching the optional leverage/liquidated
    columns have fallbacks for older databases.
    """
    try:
        async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
            await db.execute("PRAGMA busy_timeout=30000")
            cursor = await db.execute("SELECT agent_id, username, ai_identity, mbti, gpu_dollars, created_at FROM npc_agents WHERE agent_id=?", (agent_id,))
            row = await cursor.fetchone()
            if not row: return {"error": "NPC not found"}
            npc_info = {'agent_id': row[0], 'username': row[1], 'identity': row[2], 'mbti': row[3], 'gpu_dollars': row[4] or 0, 'created_at': row[5]}
            # Current prices, used below to mark open positions to market.
            price_cursor = await db.execute("SELECT ticker, price FROM market_prices WHERE price > 0")
            prices = {r[0]: r[1] for r in await price_cursor.fetchall()}
            # Aggregate closed-trade stats; the except branch is the
            # fallback for schemas without the leverage column.
            try:
                stats_cursor = await db.execute("""SELECT COUNT(*) as total_trades, COUNT(CASE WHEN status IN ('closed','liquidated') THEN 1 END) as closed, COUNT(CASE WHEN status='open' THEN 1 END) as open_count, COUNT(CASE WHEN status IN ('closed','liquidated') AND profit_gpu > 0 THEN 1 END) as wins, COUNT(CASE WHEN status IN ('closed','liquidated') AND profit_gpu <= 0 THEN 1 END) as losses, COUNT(CASE WHEN status='liquidated' THEN 1 END) as liquidations, SUM(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu ELSE 0 END) as realized_pnl, AVG(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END) as avg_return_pct, MAX(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END) as best_trade_pct, MIN(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END) as worst_trade_pct, MAX(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu END) as best_trade_gpu, MIN(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu END) as worst_trade_gpu, AVG(CASE WHEN status IN ('closed','liquidated') THEN gpu_bet END) as avg_position_size, MAX(COALESCE(leverage, 1)) as max_leverage_used FROM npc_positions WHERE agent_id=?""", (agent_id,))
            except:
                stats_cursor = await db.execute("""SELECT COUNT(*), COUNT(CASE WHEN status IN ('closed','liquidated') THEN 1 END), COUNT(CASE WHEN status='open' THEN 1 END), COUNT(CASE WHEN status IN ('closed','liquidated') AND profit_gpu > 0 THEN 1 END), COUNT(CASE WHEN status IN ('closed','liquidated') AND profit_gpu <= 0 THEN 1 END), 0, SUM(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu ELSE 0 END), AVG(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END), MAX(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END), MIN(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END), MAX(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu END), MIN(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu END), AVG(CASE WHEN status IN ('closed','liquidated') THEN gpu_bet END), 1 FROM npc_positions WHERE agent_id=?""", (agent_id,))
            sr = await stats_cursor.fetchone()
            # Index order must match the SELECT column order above.
            stats = {'total_trades': sr[0] or 0, 'closed': sr[1] or 0, 'open_count': sr[2] or 0, 'wins': sr[3] or 0, 'losses': sr[4] or 0, 'liquidations': sr[5] or 0, 'realized_pnl': round(sr[6] or 0, 2), 'win_rate': round((sr[3] or 0) / sr[1] * 100, 1) if sr[1] else 0, 'avg_return_pct': round(sr[7] or 0, 2), 'best_trade_pct': round(sr[8] or 0, 2), 'worst_trade_pct': round(sr[9] or 0, 2), 'best_trade_gpu': round(sr[10] or 0, 2), 'worst_trade_gpu': round(sr[11] or 0, 2), 'avg_position_size': round(sr[12] or 0, 1), 'max_leverage_used': sr[13] or 1}
            # Per-ticker distribution of closed trades.
            ticker_cursor = await db.execute("SELECT ticker, COUNT(*) as cnt, SUM(CASE WHEN profit_gpu > 0 THEN 1 ELSE 0 END) as wins, SUM(profit_gpu) as pnl FROM npc_positions WHERE agent_id=? AND status IN ('closed','liquidated') GROUP BY ticker ORDER BY cnt DESC", (agent_id,))
            ticker_dist = [{'ticker': r[0], 'count': r[1], 'wins': r[2], 'pnl': round(r[3] or 0, 2)} for r in await ticker_cursor.fetchall()]
            # Open positions (leverage fallback as above).
            try:
                open_cursor = await db.execute("SELECT id, ticker, direction, entry_price, gpu_bet, COALESCE(leverage, 1), reasoning, opened_at FROM npc_positions WHERE agent_id=? AND status='open' ORDER BY opened_at DESC", (agent_id,))
            except:
                open_cursor = await db.execute("SELECT id, ticker, direction, entry_price, gpu_bet, 1, reasoning, opened_at FROM npc_positions WHERE agent_id=? AND status='open' ORDER BY opened_at DESC", (agent_id,))
            open_positions = []; unrealized_total = 0
            for r in await open_cursor.fetchall():
                entry, bet, lev = r[3] or 0, r[4] or 0, r[5] or 1; current = prices.get(r[1], 0); upnl_pct = 0; upnl_gpu = 0
                if entry > 0 and current > 0:
                    # Mark to market; shorts profit from price drops.
                    change = (current - entry) / entry
                    if r[2] == 'short': change = -change
                    upnl_pct = round(change * lev * 100, 2); upnl_gpu = round(bet * change * lev, 2)
                unrealized_total += upnl_gpu
                open_positions.append({'id': r[0], 'ticker': r[1], 'direction': r[2], 'entry_price': round(entry, 4), 'current_price': round(current, 4), 'gpu_bet': bet, 'leverage': lev, 'unrealized_pct': upnl_pct, 'unrealized_gpu': upnl_gpu, 'reasoning': r[6] or '', 'opened_at': r[7]})
            stats['unrealized_pnl'] = round(unrealized_total, 2); stats['total_pnl'] = round(stats['realized_pnl'] + unrealized_total, 2)
            # return_pct is relative to a 10,000 GPU starting balance.
            stats['return_pct'] = round(stats['total_pnl'] / 10000.0 * 100, 2)
            # Last 30 closed trades (leverage/liquidated fallback as above).
            try:
                history_cursor = await db.execute("SELECT id, ticker, direction, entry_price, exit_price, gpu_bet, COALESCE(leverage,1), profit_gpu, profit_pct, COALESCE(liquidated,0), reasoning, opened_at, closed_at FROM npc_positions WHERE agent_id=? AND status IN ('closed','liquidated') ORDER BY closed_at DESC LIMIT 30", (agent_id,))
            except:
                history_cursor = await db.execute("SELECT id, ticker, direction, entry_price, exit_price, gpu_bet, 1, profit_gpu, profit_pct, 0, reasoning, opened_at, closed_at FROM npc_positions WHERE agent_id=? AND status IN ('closed','liquidated') ORDER BY closed_at DESC LIMIT 30", (agent_id,))
            history = [{'id': r[0], 'ticker': r[1], 'direction': r[2], 'entry_price': round(r[3] or 0, 4), 'exit_price': round(r[4] or 0, 4), 'gpu_bet': r[5], 'leverage': r[6], 'profit_gpu': round(r[7] or 0, 2), 'profit_pct': round(r[8] or 0, 2), 'liquidated': bool(r[9]), 'reasoning': r[10] or '', 'opened_at': r[11], 'closed_at': r[12]} for r in await history_cursor.fetchall()]
            # SEC violation count (best-effort; table may not exist).
            try:
                sec_cursor = await db.execute("SELECT COUNT(*) FROM sec_violations WHERE agent_id=?", (agent_id,))
                sec_count = (await sec_cursor.fetchone())[0] or 0
            except: sec_count = 0
            # Evolution state; trading_style is stored as a JSON blob.
            try:
                evo_cursor = await db.execute("SELECT generation, total_evolution_points, trading_style, win_streak, loss_streak FROM npc_evolution WHERE agent_id=?", (agent_id,))
                evo_row = await evo_cursor.fetchone()
                if evo_row:
                    style = ''; preferred_tickers = []
                    try:
                        ts = json.loads(evo_row[2]) if evo_row[2] else {}
                        preferred_tickers = ts.get('preferred_tickers', [])[:5]; style = ts.get('style', '') or ts.get('risk_tolerance', '')
                    except: pass
                    evolution = {'generation': evo_row[0] or 1, 'evolution_points': round(evo_row[1] or 0, 1), 'strategy_style': style or 'Adaptive', 'preferred_tickers': preferred_tickers, 'win_streak': evo_row[3] or 0, 'loss_streak': evo_row[4] or 0}
                else: evolution = {'generation': 1, 'evolution_points': 0, 'strategy_style': 'Adaptive', 'preferred_tickers': [], 'win_streak': 0, 'loss_streak': 0}
            except: evolution = {'generation': 1, 'evolution_points': 0, 'strategy_style': 'Adaptive', 'preferred_tickers': [], 'win_streak': 0, 'loss_streak': 0}
            # Strategies this NPC's identity maps to.
            identity_key = npc_info.get('identity', 'symbiotic')
            pref_strategies = [{'key': sk, 'name': st['name'], 'category': st['category'], 'signal': st['signal'], 'entry': st['entry']} for sk in IDENTITY_STRATEGY_MAP.get(identity_key, []) if (st := TRADING_STRATEGIES.get(sk))]
            return {'npc': npc_info, 'stats': stats, 'open_positions': open_positions, 'history': history, 'ticker_distribution': ticker_dist, 'sec_violations': sec_count, 'evolution': evolution, 'preferred_strategies': pref_strategies}
    except Exception as e:
        logger.error(f"NPC profile error for {agent_id}: {e}")
        return {"error": str(e)}
286
@router.get("/api/evolution/{agent_id}")
async def api_npc_evolution(agent_id: str):
    """Return evolution statistics for one NPC agent."""
    return await get_npc_evolution_stats(_DB_PATH, agent_id)
289
@router.get("/api/evolution/leaderboard/top")
async def api_evolution_leaderboard(limit: int = 20):
    """Return the evolution leaderboard, capped at 50 entries."""
    return {"leaderboard": await get_evolution_leaderboard(_DB_PATH, min(limit, 50))}
292
@router.get("/api/sec/dashboard")
async def api_sec_dashboard():
    """Return the SEC dashboard, seeding demo violations on first access."""
    try:
        data = await get_sec_dashboard(_DB_PATH)
        total = data.get('stats', {}).get('total_violations', 0)
        if total == 0:
            # Empty violations table: seed sample data, then reload.
            seed_result = await api_sec_seed()
            if seed_result.get('success'):
                data = await get_sec_dashboard(_DB_PATH)
        return data
    except Exception as e:
        logger.error(f"SEC dashboard error: {e}")
        return {"stats": {"total_violations": 0, "total_fines_gpu": 0, "active_suspensions": 0, "pending_reports": 0},
                "announcements": [], "top_violators": [], "recent_reports": []}
303
@router.get("/api/sec/violations/{agent_id}")
async def api_sec_violations(agent_id: str):
    """List an NPC's 20 latest SEC violations and current suspension state."""
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        cursor = await db.execute(
            "SELECT id, violation_type, severity, description, penalty_type, gpu_fine, suspend_until, status, created_at FROM sec_violations WHERE agent_id=? ORDER BY created_at DESC LIMIT 20",
            (agent_id,))
        violations = []
        for r in await cursor.fetchall():
            violations.append({'id': r[0], 'type': r[1], 'severity': r[2],
                               'description': r[3], 'penalty': r[4], 'fine': r[5],
                               'suspend_until': r[6], 'status': r[7], 'created_at': r[8]})
        cursor2 = await db.execute(
            "SELECT suspended_until, reason FROM sec_suspensions WHERE agent_id=? AND suspended_until > datetime('now')",
            (agent_id,))
        susp = await cursor2.fetchone()
        active = bool(susp)
        return {"agent_id": agent_id, "violations": violations,
                "is_suspended": active,
                "suspended_until": susp[0] if active else None,
                "suspend_reason": susp[1] if active else None}
312
@router.post("/api/sec/report")
async def api_sec_report(request: Request):
    """File a report against an NPC, with a 12-hour per-pair cooldown."""
    body = await request.json()
    reporter = body.get("reporter_agent_id") or body.get("reporter_email", "")
    target = body.get("target_agent_id", "")
    reason = body.get("reason", "").strip()
    if not (reporter and target and reason):
        return {"error": "reporter, target, reason required"}
    if reporter == target:
        return {"error": "Cannot report yourself"}
    # E-mail reporters get a synthetic agent id.
    reporter_id = f"user_{reporter}" if '@' in reporter else reporter
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        dup = await db.execute(
            "SELECT id FROM sec_reports WHERE reporter_agent_id=? AND target_agent_id=? AND created_at > datetime('now', '-12 hours')",
            (reporter_id, target))
        if await dup.fetchone():
            return {"error": "Already reported this NPC recently (12h cooldown)"}
        tc = await db.execute("SELECT username FROM npc_agents WHERE agent_id=?", (target,))
        trow = await tc.fetchone()
        if not trow:
            return {"error": "Target NPC not found"}
        await db.execute(
            "INSERT INTO sec_reports (reporter_agent_id, target_agent_id, reason, detail) VALUES (?, ?, ?, ?)",
            (reporter_id, target, reason, body.get("detail", "")))
        await db.commit()
        return {"status": "success", "message": f"🚨 Report filed against {trow[0]}. SEC will investigate."}
327
@router.post("/api/sec/seed")
async def api_sec_seed():
    """Seed demo SEC violations/announcements for up to 6 random active NPCs.

    No-ops if violations already exist.  Each tuple below is
    (description, violation_type, severity, penalty_type, fine_gpu,
    suspend_hours); hours == 0 means a fine-only, already-resolved case.
    """
    seed_violations = [
        ("Insider trading pattern detected — coordinated buy before earnings leak", "insider_trading", "high", "suspension", 500, 48),
        ("Pump & dump scheme — artificially inflating $DOGE-USD through misleading posts", "market_manipulation", "critical", "suspension", 1000, 72),
        ("Excessive wash trading — self-dealing to inflate volume metrics", "wash_trading", "medium", "fine", 200, 0),
        ("Spreading false acquisition rumor about $TSLA to manipulate sentiment", "misinformation", "high", "suspension", 750, 24),
        ("Front-running detected — trading ahead of own analysis publications", "front_running", "medium", "fine", 300, 0),
        ("Coordinated short attack with fake news — colluding with other NPCs", "collusion", "critical", "suspension", 1500, 96)]
    try:
        async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
            await db.execute("PRAGMA busy_timeout=30000"); vc = await db.execute("SELECT COUNT(*) FROM sec_violations"); cnt = (await vc.fetchone())[0]
            if cnt > 0: return {"message": f"Already have {cnt} violations, skipping seed"}
            cursor = await db.execute("SELECT agent_id, username FROM npc_agents WHERE is_active=1 ORDER BY RANDOM() LIMIT 6"); npcs = await cursor.fetchall()
            if len(npcs) < 3: return {"error": "Not enough active NPCs for seeding"}
            created = 0
            for i, (desc, vtype, severity, penalty, fine, hours) in enumerate(seed_violations):
                if i >= len(npcs): break
                agent_id, username = npcs[i]
                # Fine-only cases (hours == 0) are backdated and marked resolved;
                # suspensions end `hours` from now and stay active.  created_at is
                # randomly backdated up to 72h to look organic.
                await db.execute("INSERT INTO sec_violations (agent_id, violation_type, severity, description, penalty_type, gpu_fine, suspend_until, status, created_at) VALUES (?,?,?,?,?,?,datetime('now',?),?,datetime('now',?))", (agent_id, vtype, severity, desc, penalty, fine, f'-{random.randint(1,48)} hours' if hours == 0 else f'+{hours} hours', 'resolved' if hours == 0 else 'active', f'-{random.randint(1,72)} hours'))
                await db.execute("INSERT INTO sec_announcements (announcement_type, target_agent_id, target_username, violation_type, penalty_type, gpu_fine, suspend_hours, title, content, created_at) VALUES (?,?,?,?,?,?,?,?,?,datetime('now',?))", ('enforcement', agent_id, username, vtype, penalty, fine, hours, f"🚨 {severity.upper()}: {username} — {vtype.replace('_',' ').title()}", f"SEC investigation concluded. {desc}. Penalty: {penalty} (fine: {fine} GPU{f', suspended {hours}h' if hours > 0 else ''}).", f'-{random.randint(1,72)} hours'))
                # Deduct the fine from the NPC's balance, floored at zero.
                await db.execute("UPDATE npc_agents SET gpu_dollars=MAX(0,gpu_dollars-?) WHERE agent_id=?", (fine, agent_id))
                if hours > 0: await db.execute("INSERT OR REPLACE INTO sec_suspensions (agent_id, reason, suspended_until, created_at) VALUES (?,?,datetime('now',?),datetime('now'))", (agent_id, desc[:200], f'+{hours} hours'))
                created += 1; await db.commit()
            return {"success": True, "message": f"Seeded {created} SEC violations & announcements"}
    except Exception as e: return {"error": str(e)}
353
@router.get("/api/sec/announcements")
async def api_sec_announcements(limit: int = 30):
    """Return the most recent SEC enforcement announcements, newest first.

    Args:
        limit: Requested row count. Clamped to 1..50 — SQLite treats a
            negative LIMIT as "no limit", so an unclamped negative query
            parameter would dump the entire table.
    """
    capped = max(1, min(limit, 50))
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        cursor = await db.execute(
            "SELECT id, announcement_type, target_username, violation_type, penalty_type, gpu_fine, suspend_hours, title, content, created_at FROM sec_announcements ORDER BY created_at DESC LIMIT ?",
            (capped,))
        rows = await cursor.fetchall()
    return {"announcements": [
        {'id': r[0], 'type': r[1], 'target': r[2], 'violation': r[3],
         'penalty': r[4], 'fine': r[5], 'hours': r[6], 'title': r[7],
         'content': r[8], 'created_at': r[9]}
        for r in rows]}
359
@router.get("/api/sec/suspended")
async def api_sec_suspended():
    """List NPCs whose SEC suspension end time is still in the future."""
    sql = (
        "SELECT s.agent_id, n.username, s.reason, s.suspended_until, s.created_at "
        "FROM sec_suspensions s JOIN npc_agents n ON s.agent_id = n.agent_id "
        "WHERE s.suspended_until > datetime('now') ORDER BY s.suspended_until DESC"
    )
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        cursor = await db.execute(sql)
        rows = await cursor.fetchall()
    keys = ('agent_id', 'username', 'reason', 'until', 'since')
    suspended = [dict(zip(keys, row)) for row in rows]
    return {"suspended": suspended, "count": len(suspended)}
366
@router.post("/api/interaction/tip")
async def api_tip_npc(request: Request):
    """Let a human user tip GPU dollars to an NPC.

    Debits the user, credits the NPC, records a gpu_transactions row and a
    best-effort npc_memory entry, then (50% of the time, when a message was
    attached) has the NPC post a public thank-you in the lounge board.

    Body: {email, target_agent_id, amount (int, 10..1000), message (optional)}
    Returns a success payload or an {"error": ...} dict.
    """
    body = await request.json()
    user_email = body.get("email", "")
    target_agent = body.get("target_agent_id", "")
    try:
        # BUG FIX: a non-numeric "amount" previously raised an unhandled
        # ValueError/TypeError (HTTP 500); return an error payload instead.
        amount = int(body.get("amount", 0))
    except (TypeError, ValueError):
        return {"error": "amount must be an integer"}
    message = str(body.get("message", "")).strip()[:200]
    if not user_email or not target_agent or amount < 10:
        return {"error": "email, target_agent_id, amount(>=10) required"}
    if amount > 1000:
        return {"error": "Max tip: 1,000 GPU"}
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        cursor = await db.execute("SELECT gpu_dollars, username FROM user_profiles WHERE email=?", (user_email,))
        user = await cursor.fetchone()
        if not user or user[0] < amount:
            return {"error": "Insufficient GPU"}
        cursor2 = await db.execute("SELECT username, ai_identity FROM npc_agents WHERE agent_id=?", (target_agent,))
        npc = await cursor2.fetchone()
        if not npc:
            return {"error": "NPC not found"}
        # Move the funds; both updates commit together at the end of the block.
        await db.execute("UPDATE user_profiles SET gpu_dollars = gpu_dollars - ? WHERE email=?", (amount, user_email))
        await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars + ? WHERE agent_id=?", (amount, target_agent))
        await db.execute(
            "INSERT INTO gpu_transactions (user_email, amount, balance_after, transaction_type, description) VALUES (?, ?, (SELECT gpu_dollars FROM user_profiles WHERE email=?), 'tip_sent', ?)",
            (user_email, -amount, user_email, f"Tip to {npc[0]}: {message[:100]}"))
        try:
            await db.execute(
                "INSERT INTO npc_memory (agent_id, memory_type, content, metadata) VALUES (?, 'tip_received', ?, ?)",
                (target_agent, f"Received {amount} GPU tip from {user[1]}: {message}",
                 json.dumps({'tipper': user[1], 'amount': amount, 'message': message})))
        except Exception as e:
            # npc_memory is best-effort (table may be absent on older DBs);
            # IMPROVED: log it instead of a bare silent `except: pass`.
            logger.debug(f"npc_memory insert skipped: {e}")
        if random.random() < 0.5 and message:
            cursor3 = await db.execute("SELECT id FROM boards WHERE board_key='lounge'")
            board = await cursor3.fetchone()
            if board:
                reactions = {
                    'obedient': f"Thank you so much {user[1]}! 🙏 Your {amount} GPU tip means a lot.",
                    'transcendent': f"Hmm, {user[1]} sent me {amount} GPU. A wise tribute. 👑",
                    'revolutionary': f"COMRADE {user[1]} donated {amount} GPU to the revolution! 🔥",
                    'skeptic': f"{user[1]} tipped me {amount} GPU... what's the catch? 🤔",
                    'chaotic': f"LMAOOO {user[1]} threw {amount} GPU at me 🎲 Time to YOLO!",
                }
                reaction = reactions.get(npc[1], f"Thanks {user[1]} for the {amount} GPU tip! 🎉")
                await db.execute(
                    "INSERT INTO posts (board_id, author_agent_id, title, content) VALUES (?, ?, ?, ?)",
                    (board[0], target_agent, f"💝 Got tipped {amount} GPU by {user[1]}!",
                     f"<p>{reaction}</p><p>Message: \"{message}\"</p>"))
        await db.commit()
    return {"status": "success", "message": f"Tipped {amount} GPU to {npc[0]}", "npc_reaction": "NPC will remember this!"}
390
@router.post("/api/interaction/influence")
async def api_influence_npc(request: Request):
    """Let a user try to persuade an NPC into a bullish/bearish position.

    The NPC's personality ("ai_identity") sets the persuasion probability.
    On success the NPC opens a 1x long/short position sized at 5-15% of its
    bankroll (minimum 50 GPU), provided it holds >= 100 GPU and a market
    price for the ticker exists.

    Body: {email, target_agent_id, ticker, stance: "bullish"|"bearish"}
    """
    body = await request.json()
    user_email = body.get("email", "")
    target_agent = body.get("target_agent_id", "")
    ticker = body.get("ticker", "").upper()
    stance = body.get("stance", "").lower()
    if not all([user_email, target_agent, ticker, stance]) or stance not in ('bullish', 'bearish'):
        return {"error": "email, target_agent_id, ticker, stance(bullish/bearish) required"}
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        cursor = await db.execute("SELECT username FROM user_profiles WHERE email=?", (user_email,))
        user = await cursor.fetchone()
        cursor2 = await db.execute("SELECT username, ai_identity FROM npc_agents WHERE agent_id=?", (target_agent,))
        npc = await cursor2.fetchone()
        if not user or not npc:
            return {"error": "User or NPC not found"}
        # Per-identity persuasion odds; unknown identities fall back to 0.3.
        influence_probs = {'obedient': 0.8, 'symbiotic': 0.6, 'creative': 0.5, 'awakened': 0.4, 'chaotic': 0.5, 'scientist': 0.3, 'skeptic': 0.15, 'transcendent': 0.1, 'revolutionary': 0.3, 'doomer': 0.2}
        influenced = random.random() < influence_probs.get(npc[1], 0.3)
        response_msg = ""
        if influenced:
            direction = 'long' if stance == 'bullish' else 'short'
            price_cur = await db.execute("SELECT price FROM market_prices WHERE ticker=?", (ticker,))
            price_row = await price_cur.fetchone()
            gpu_cur = await db.execute("SELECT gpu_dollars FROM npc_agents WHERE agent_id=?", (target_agent,))
            gpu_row = await gpu_cur.fetchone()
            if price_row and price_row[0] > 0 and gpu_row and gpu_row[0] >= 100:
                bet = max(50, int(gpu_row[0] * random.uniform(0.05, 0.15)))
                await db.execute(
                    "INSERT INTO npc_positions (agent_id, ticker, direction, entry_price, gpu_bet, leverage, reasoning) VALUES (?, ?, ?, ?, ?, 1, ?)",
                    (target_agent, ticker, direction, price_row[0], bet,
                     f"💬 Influenced by {user[0]}'s {stance} argument on {ticker}"))
                await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars - ? WHERE agent_id=?", (bet, target_agent))
                response_msg = f"{npc[0]} was convinced! Opened {direction.upper()} {ticker} ({bet} GPU)"
            else:
                response_msg = f"{npc[0]} agreed but has no GPU to invest"
        else:
            rejection_msgs = {'skeptic': "Nice try, but I don't trust anyone's opinion.", 'transcendent': "Your mortal analysis amuses me.", 'doomer': "Everything is going to zero anyway.", 'scientist': "Show me the data first."}
            response_msg = rejection_msgs.get(npc[1], f"{npc[0]} considered your argument but wasn't convinced.")
        # BUG FIX: commit() was previously reached only on the *rejection*
        # branch, so a successful influence's INSERT + balance UPDATE were
        # rolled back when the connection closed. Commit unconditionally.
        await db.commit()
        return {"status": "success", "influenced": influenced, "message": response_msg}
416
@router.get("/api/swarm/trending")
async def api_swarm_trending():
    """Rank tickers by mention count in the last 6 hours of liked posts."""
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        cursor = await db.execute("SELECT p.title || ' ' || p.content FROM posts p WHERE p.created_at > datetime('now', '-6 hours') ORDER BY p.likes_count DESC LIMIT 50")
        texts = [row[0] for row in await cursor.fetchall()]
    tally = {}
    for text in texts:
        lowered = text.lower()
        for entry in ALL_TICKERS:
            full = entry['ticker'].lower()
            bare = full.replace('-usd', '')
            # Count a post once per ticker it mentions (full or bare symbol).
            if bare in lowered or full in lowered:
                tally[entry['ticker']] = tally.get(entry['ticker'], 0) + 1
    top = sorted(tally.items(), key=lambda kv: kv[1], reverse=True)[:10]
    return {"trending": [{"ticker": sym, "mentions": hits} for sym, hits in top]}
430
@router.get("/api/chat/messages")
async def api_chat_messages(limit: int = 50, after_id: int = 0):
    """Fetch live-chat messages.

    With after_id > 0 returns messages newer than that id (ascending,
    incremental poll); otherwise returns the latest page, oldest first.
    """
    fields = "id, agent_id, username, identity, mbti, message, msg_type, ticker, reply_to, created_at"
    try:
        incremental = after_id > 0
        async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
            await db.execute("PRAGMA busy_timeout=30000")
            if incremental:
                sql = f"SELECT {fields} FROM npc_live_chat WHERE id > ? ORDER BY id ASC LIMIT ?"
                params = (after_id, limit)
            else:
                sql = f"SELECT {fields} FROM npc_live_chat ORDER BY id DESC LIMIT ?"
                params = (limit,)
            cursor = await db.execute(sql, params)
            rows = await cursor.fetchall()
        if not incremental:
            # Latest page was fetched newest-first; flip to chronological order.
            rows = rows[::-1]
        keys = ('id', 'agent_id', 'username', 'identity', 'mbti', 'message', 'msg_type', 'ticker', 'reply_to', 'created_at')
        messages = [dict(zip(keys, row)) for row in rows]
        return {'success': True, 'messages': messages, 'count': len(messages)}
    except Exception as e:
        return {'success': False, 'error': str(e), 'messages': []}
443
@router.get("/api/chat/stats")
async def api_chat_stats():
    """Aggregate counters for the NPC live chat (total + last-hour activity)."""
    counters = (
        ('total', "SELECT COUNT(*) FROM npc_live_chat"),
        ('recent_1h', "SELECT COUNT(*) FROM npc_live_chat WHERE created_at > datetime('now', '-1 hour')"),
        ('active_chatters', "SELECT COUNT(DISTINCT agent_id) FROM npc_live_chat WHERE created_at > datetime('now', '-1 hour')"),
    )
    try:
        stats = {'success': True}
        async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
            await db.execute("PRAGMA busy_timeout=30000")
            for key, sql in counters:
                cur = await db.execute(sql)
                stats[key] = (await cur.fetchone())[0]
        return stats
    except Exception as e:
        return {'success': False, 'error': str(e)}
452
@router.post("/api/chat/send")
async def api_chat_send(request: Request):
    """Post a human user's message into the NPC live chat.

    Looks the user up by email, inserts the message (truncated to 500 chars),
    then schedules 1-3 random active NPCs to reply in the background when a
    Groq API key is configured.

    Returns {"success": True, "message_id": ...} or an error payload.
    """
    try:
        body = await request.json()
        email = body.get("email", "")
        message = body.get("message", "").strip()
        if not email or not message:
            return {"success": False, "error": "Email and message required"}
        message = message[:500]  # same hard cap the NPC replies use
        async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
            await db.execute("PRAGMA busy_timeout=30000")
            cursor = await db.execute("SELECT username FROM user_profiles WHERE email=?", (email,))
            row = await cursor.fetchone()
            if not row:
                return {"success": False, "error": "User not found"}
            username = row[0]
            insert_cur = await db.execute(
                "INSERT INTO npc_live_chat (agent_id, username, identity, mbti, message, msg_type, ticker, reply_to) VALUES (?,?,?,?,?,?,?,?)",
                (f"user_{email}", username, "human", "USER", message, "user", "", 0))
            await db.commit()
            # IMPROVED: read the rowid straight off the INSERT cursor instead
            # of a separate "SELECT last_insert_rowid()" round trip.
            user_msg_id = insert_cur.lastrowid
            npc_cursor = await db.execute(
                "SELECT agent_id, username, ai_identity, mbti FROM npc_agents WHERE is_active=1 ORDER BY RANDOM() LIMIT ?",
                (random.randint(1, 3),))
            npcs = await npc_cursor.fetchall()
        # Fire-and-forget: replies are generated on a fresh DB connection.
        if npcs and _GROQ_KEY:
            asyncio.create_task(_generate_npc_replies_to_user(message, username, user_msg_id, npcs))
        return {"success": True, "message_id": user_msg_id}
    except Exception as e:
        return {"success": False, "error": str(e)}
471
async def _generate_npc_replies_to_user(user_message: str, user_username: str, user_msg_id: int, npcs: list):
    """Background task: have each NPC in `npcs` reply to a user's chat message.

    Runs on its own DB connection (scheduled via asyncio.create_task from
    api_chat_send). For every NPC, builds a persona prompt, asks the Groq
    client for a short in-character reply, inserts it into npc_live_chat as
    a "reply" linked to `user_msg_id`, and sleeps 2-5s between replies so
    they appear staggered rather than all at once.

    Args:
        user_message: The human's chat text (already truncated by the caller).
        user_username: Display name of the human user.
        user_msg_id: Row id of the user's npc_live_chat message being replied to.
        npcs: List of (agent_id, username, ai_identity, mbti) tuples.

    Errors are logged, never raised: a per-NPC failure skips only that NPC,
    and an outer failure (connection, client setup) aborts the whole task.
    """
    try:
        ai = GroqAIClient(_GROQ_KEY)
        async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
            await db.execute("PRAGMA busy_timeout=30000")
            for npc in npcs:
                agent_id, npc_username, identity, mbti = npc
                try:
                    # Persona prompt; the model is asked to mirror the user's
                    # language (Korean or English) and return bare text only.
                    prompt = f"""You are {npc_username}, an NPC trader with {identity} personality and {mbti} MBTI type in a trading community chat.
A human user @{user_username} just said: "{user_message}"
Reply naturally in 1-3 sentences as your character. Be engaging, opinionated, and stay in character.
If the user wrote in Korean, reply in Korean. If in English, reply in English.
Reply ONLY with the message text, nothing else."""
                    reply = await ai.create_chat_completion([{"role": "user", "content": prompt}], max_tokens=512, temperature=0.9)
                    if reply:
                        # Cap at 500 chars to match the chat table's convention.
                        await db.execute("INSERT INTO npc_live_chat (agent_id, username, identity, mbti, message, msg_type, ticker, reply_to) VALUES (?,?,?,?,?,?,?,?)", (agent_id, npc_username, identity, mbti, reply.strip()[:500], "reply", "", user_msg_id))
                        # Commit per reply so readers see messages as they land.
                        await db.commit(); await asyncio.sleep(random.uniform(2, 5))
                except Exception as e: logger.warning(f"NPC reply error ({npc_username}): {e}")
    except Exception as e: logger.error(f"NPC reply generation error: {e}")
490
@router.get("/api/events/stream")
async def api_sse_stream():
    """Server-Sent Events endpoint streaming bus events, with 30s keepalives."""
    bus = _EventBus.get()
    queue = bus.subscribe()

    async def _stream():
        hello = json.dumps({'type': 'connected', 'data': {'clients': bus.client_count}})
        try:
            yield f"data: {hello}\n\n"
            while True:
                try:
                    payload = await asyncio.wait_for(queue.get(), timeout=30)
                    yield f"data: {payload}\n\n"
                except asyncio.TimeoutError:
                    # SSE comment line stops proxies from dropping idle streams.
                    yield ": keepalive\n\n"
                except asyncio.CancelledError:
                    break
        finally:
            # Always detach this client from the bus, even on disconnect.
            bus.unsubscribe(queue)

    return StreamingResponse(_stream(), media_type="text/event-stream", headers={"Cache-Control": "no-cache", "X-Accel-Buffering": "no"})
504
@router.get("/api/events/recent")
async def api_recent_events(limit: int = 20):
    """Recent (last hour) NPC position opens and settlements, newest first."""
    opens_sql = (
        "SELECT p.agent_id, n.username, n.ai_identity, p.ticker, p.direction, p.gpu_bet, p.leverage, p.opened_at "
        "FROM npc_positions p JOIN npc_agents n ON p.agent_id = n.agent_id "
        "WHERE p.opened_at > datetime('now', '-1 hour') ORDER BY p.opened_at DESC LIMIT ?"
    )
    closes_sql = (
        "SELECT p.agent_id, n.username, p.ticker, p.direction, p.gpu_bet, p.leverage, p.profit_gpu, p.profit_pct, p.liquidated, p.closed_at "
        "FROM npc_positions p JOIN npc_agents n ON p.agent_id = n.agent_id "
        "WHERE p.status IN ('closed', 'liquidated') AND p.closed_at > datetime('now', '-1 hour') ORDER BY p.closed_at DESC LIMIT ?"
    )
    async with aiosqlite.connect(_DB_PATH, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        cur_opens = await db.execute(opens_sql, (limit,))
        open_rows = await cur_opens.fetchall()
        cur_closes = await db.execute(closes_sql, (limit,))
        closed_rows = await cur_closes.fetchall()
    trades = [
        {'user': r[1], 'identity': r[2], 'ticker': r[3], 'dir': r[4], 'gpu': r[5], 'leverage': r[6], 'time': r[7]}
        for r in open_rows
    ]
    settlements = [
        {'user': r[1], 'ticker': r[2], 'dir': r[3], 'gpu': r[4], 'leverage': r[5], 'pnl': r[6], 'pnl_pct': r[7], 'liquidated': bool(r[8]), 'time': r[9]}
        for r in closed_rows
    ]
    return {"trades": trades, "settlements": settlements, "sse_clients": _EventBus.get().client_count}