Spaces:
Running
Running
Update simulation_engine/mock_kucoin.py
Browse files — simulation_engine/mock_kucoin.py (+38 −68)
simulation_engine/mock_kucoin.py
CHANGED
|
@@ -1,130 +1,101 @@
|
|
| 1 |
# simulation_engine/mock_kucoin.py
|
| 2 |
-
# (V1.
|
| 3 |
|
| 4 |
import pandas as pd
|
| 5 |
import numpy as np
|
| 6 |
import os
|
| 7 |
-
import asyncio
|
| 8 |
|
| 9 |
class MockKuCoin:
    """Offline stand-in for a ccxt-style KuCoin client, used for backtesting.

    Replays pre-downloaded OHLCV parquet files and only exposes candles
    that closed at or before the simulated clock, so the strategy under
    test can never look into the future.
    """

    def __init__(self, raw_data_dir):
        """
        Args:
            raw_data_dir: directory holding ``<SYMBOL>_<tf>.parquet`` files
                (e.g. ``BTCUSDT_5m.parquet``).
        """
        self.data_store = {}        # fast in-memory cache: {symbol: {timeframe: DataFrame}}
        self.current_time_ms = 0    # simulation clock, epoch milliseconds
        self.raw_dir = raw_data_dir

    async def load_data(self, symbols, timeframes):
        """Pre-load the requested data into memory to speed up the simulation."""
        print(f"⏳ [MockKuCoin] Loading historical data for {len(symbols)} symbols...")
        count = 0
        for sym in symbols:
            safe_sym = sym.replace('/', '')
            self.data_store[sym] = {}
            for tf in timeframes:
                # Try the plausible on-disk naming variants for this pair/timeframe.
                paths_to_try = [
                    os.path.join(self.raw_dir, f"{safe_sym}_{tf}.parquet"),
                    os.path.join(self.raw_dir, f"{sym.replace('/', '')}_{tf}.parquet"),
                ]
                for path in paths_to_try:
                    if os.path.exists(path):
                        try:
                            df = pd.read_parquet(path)
                            # Enforce chronological order and drop duplicate candles so
                            # the searchsorted() logic in fetch_ohlcv stays valid.
                            df = df.sort_values('timestamp').drop_duplicates('timestamp').reset_index(drop=True)
                            self.data_store[sym][tf] = df
                            count += 1
                            break
                        except Exception as e:
                            print(f"⚠️ [MockKuCoin] Error loading {path}: {e}")
        print(f"✅ [MockKuCoin] Loaded {count} data files into memory.")

    def set_time(self, timestamp_ms):
        """Set (advance) the current simulation clock, in epoch milliseconds."""
        self.current_time_ms = timestamp_ms

    async def fetch_ohlcv(self, symbol, timeframe, limit=500, since=None):
        """Mock of ``ccxt.fetch_ohlcv``.

        Returns only candles that 'closed' at or before the current simulated
        time, as a list of ``[timestamp, open, high, low, close, volume]``
        rows (oldest first), honoring both ``since`` and ``limit``.
        """
        if symbol not in self.data_store or timeframe not in self.data_store[symbol]:
            return []
        df = self.data_store[symbol][timeframe]
        if df.empty:
            return []

        timestamps = df['timestamp'].values
        # end_idx is the first index whose timestamp is strictly greater than
        # the simulated "now": everything before it has already closed.
        end_idx = np.searchsorted(timestamps, self.current_time_ms, side='right')

        start_idx = 0
        # BUG FIX: the original tested `if since:`, which silently ignores a
        # legitimate since=0 (epoch). Test for None explicitly instead.
        if since is not None:
            start_idx = np.searchsorted(timestamps, since, side='left')

        # Apply `limit`: at most the last `limit` candles before end_idx...
        limit_start_idx = max(0, end_idx - limit)
        # ...while never going back before what `since` allows.
        final_start_idx = max(start_idx, limit_start_idx)

        # Decisive emptiness check (also covers end_idx == 0).
        if final_start_idx >= end_idx:
            return []

        # Extract and convert to the ccxt list-of-lists format via numpy (fast).
        subset = df.iloc[final_start_idx:end_idx]
        return subset[['timestamp', 'open', 'high', 'low', 'close', 'volume']].values.tolist()

    async def fetch_ticker(self, symbol):
        """Mock of fetching the current ticker.

        Uses the close of the last available 5-minute candle as the current
        price, going through fetch_ohlcv so the simulated time stays consistent.
        """
        candles = await self.fetch_ohlcv(symbol, '5m', limit=1)
        if candles:
            last_close = candles[-1][4]
            return {'symbol': symbol, 'last': last_close, 'timestamp': candles[-1][0]}
        return {'symbol': symbol, 'last': 0.0, 'timestamp': self.current_time_ms}

    async def fetch_tickers(self, symbols=None):
        """Mock of fetching many tickers at once (used for initial screening)."""
        tickers = {}
        target_syms = symbols if symbols else self.data_store.keys()
        for sym in target_syms:
            candles = await self.fetch_ohlcv(sym, '1d', limit=1)
            current_candle = await self.fetch_ohlcv(sym, '5m', limit=1)
            if candles and current_candle:
                vol_quote = candles[-1][5] * candles[-1][4]  # Volume * Close ~= QuoteVolume
                tickers[sym] = {
                    'symbol': sym,
                    'last': current_candle[-1][4],
                    # NOTE(review): the diff collapses the remaining ticker keys;
                    # 'quoteVolume' is the presumed consumer of vol_quote — confirm
                    # against the full file.
                    'quoteVolume': vol_quote,
                }
        return tickers

    # --- no-op stubs for interface compatibility ---
    async def load_markets(self): return True
    async def close(self): pass
|
|
|
| 1 |
# simulation_engine/mock_kucoin.py
|
| 2 |
+
# (V1.2 - Realistic Data Feeder with Debug)
|
| 3 |
|
| 4 |
import pandas as pd
|
| 5 |
import numpy as np
|
| 6 |
import os
|
|
|
|
| 7 |
|
| 8 |
class MockKuCoin:
    """Offline ccxt-style KuCoin mock (V1.2 - Realistic Data Feeder with Debug).

    Replays pre-downloaded OHLCV parquet files and only serves candles that
    closed at or before the simulated clock. Each distinct empty-result cause
    per (symbol, timeframe) is printed once, to aid debugging without flooding
    the log.
    """

    def __init__(self, raw_data_dir):
        """
        Args:
            raw_data_dir: directory holding ``<SYMBOL>_<tf>.parquet`` files.
        """
        self.data_store = {}        # {symbol: {timeframe: DataFrame}}
        self.current_time_ms = 0    # simulation clock, epoch milliseconds
        self.raw_dir = raw_data_dir
        self._warned_empty = set()  # (symbol, tf) keys already warned about once
        print(f"🎭 [MockKuCoin] Initialized. Data source: {self.raw_dir}", flush=True)

    async def load_data(self, symbols, timeframes):
        """Pre-load the requested parquet files into memory, logging each one."""
        print(f"⏳ [MockKuCoin] Loading historical data for {len(symbols)} symbols...", flush=True)
        count = 0
        for sym in symbols:
            safe_sym = sym.replace('/', '')
            self.data_store[sym] = {}
            for tf in timeframes:
                # Try the plausible on-disk naming variants for this pair/timeframe.
                paths_to_try = [
                    os.path.join(self.raw_dir, f"{safe_sym}_{tf}.parquet"),
                    os.path.join(self.raw_dir, f"{sym.replace('/', '')}_{tf}.parquet")
                ]
                loaded = False
                for path in paths_to_try:
                    if os.path.exists(path):
                        try:
                            df = pd.read_parquet(path)
                            # Chronological order + dedup keeps searchsorted() valid.
                            df = df.sort_values('timestamp').drop_duplicates('timestamp').reset_index(drop=True)
                            self.data_store[sym][tf] = df
                            loaded = True
                            count += 1
                            print(f" • Loaded {sym} {tf}: {len(df)} rows", flush=True)
                            break
                        except Exception as e:
                            print(f"⚠️ [MockKuCoin] Error loading {path}: {e}", flush=True)
                if not loaded:
                    print(f"⚠️ [MockKuCoin] Missing file for {sym} {tf}", flush=True)
        print(f"✅ [MockKuCoin] Loaded {count} data files into memory.", flush=True)

    def set_time(self, timestamp_ms):
        """Set (advance) the current simulation clock, in epoch milliseconds."""
        self.current_time_ms = timestamp_ms

    async def fetch_ohlcv(self, symbol, timeframe, limit=500, since=None):
        """Mock of ``ccxt.fetch_ohlcv``.

        Returns candles that closed at or before the simulated time as
        ``[timestamp, open, high, low, close, volume]`` rows (oldest first),
        honoring ``since`` and ``limit``. Each empty-result cause is reported
        once per (symbol, timeframe).
        """
        key = (symbol, timeframe)
        if symbol not in self.data_store or timeframe not in self.data_store[symbol]:
            if key not in self._warned_empty:
                print(f"⚠️ [MockKuCoin] No data store for {symbol} {timeframe}", flush=True)
                self._warned_empty.add(key)
            return []

        df = self.data_store[symbol][timeframe]
        if df.empty:
            if key not in self._warned_empty:
                print(f"⚠️ [MockKuCoin] Empty DF for {symbol} {timeframe}", flush=True)
                self._warned_empty.add(key)
            return []

        # Hoisted once: used by both the "now" cut-off and the `since` lookup.
        timestamps = df['timestamp'].values
        # First index with timestamp strictly greater than the simulated "now".
        end_idx = np.searchsorted(timestamps, self.current_time_ms, side='right')
        if end_idx == 0:
            if key not in self._warned_empty:
                print(f"⚠️ [MockKuCoin] end_idx=0 for {symbol} {timeframe} @ {self.current_time_ms}", flush=True)
                self._warned_empty.add(key)
            return []

        start_idx = 0
        if since is not None:
            start_idx = np.searchsorted(timestamps, since, side='left')

        # At most the last `limit` candles, never earlier than `since` allows.
        limit_start_idx = max(0, end_idx - limit)
        final_start_idx = max(start_idx, limit_start_idx)

        if final_start_idx >= end_idx:
            if key not in self._warned_empty:
                print(f"⚠️ [MockKuCoin] final_start_idx>=end_idx for {symbol} {timeframe}", flush=True)
                self._warned_empty.add(key)
            return []

        subset = df.iloc[final_start_idx:end_idx]
        return subset[['timestamp', 'open', 'high', 'low', 'close', 'volume']].values.tolist()

    async def fetch_ticker(self, symbol):
        """Mock ticker: close of the last available 5m candle at simulated time."""
        candles = await self.fetch_ohlcv(symbol, '5m', limit=1)
        if candles:
            last_close = candles[-1][4]
            return {'symbol': symbol, 'last': last_close, 'timestamp': candles[-1][0]}
        return {'symbol': symbol, 'last': 0.0, 'timestamp': self.current_time_ms}

    async def fetch_tickers(self, symbols=None):
        """Mock of fetching many tickers at once (used for initial screening)."""
        tickers = {}
        target_syms = symbols if symbols else self.data_store.keys()
        for sym in target_syms:
            candles = await self.fetch_ohlcv(sym, '1d', limit=1)
            current_candle = await self.fetch_ohlcv(sym, '5m', limit=1)
            if candles and current_candle:
                vol_quote = candles[-1][5] * candles[-1][4]  # Volume * Close ~= QuoteVolume
                tickers[sym] = {
                    'symbol': sym,
                    'last': current_candle[-1][4],
                    # NOTE(review): the diff view collapses the remaining keys of
                    # this dict; 'quoteVolume' is the presumed consumer of
                    # vol_quote — confirm against the full file.
                    'quoteVolume': vol_quote,
                }
        return tickers

    # --- no-op stubs for interface compatibility ---
    async def load_markets(self): return True
    async def close(self): pass
|