import datetime as dt
from pathlib import Path
from tqdm import tqdm
import logging
import polars as pl
import pickle
import json
import time

from cryptoAlpha.dataFeed import BinanceDataFeed

            
def get_trades_day_by_day(feed, symbol: str, on_day_cb, fromId:int=None, limit: int=1000, sleep: int=0, is_agg=False):
    """
    Fetch raw trades for *symbol* from *fromId* up to the newest available
    trade, invoking ``on_day_cb`` once per completed trading day.

    The exchange's agg_trades endpoint only covers a short time window, so
    this pulls raw trades page by page and (optionally) aggregates them
    locally instead.

    Args:
        feed: data feed exposing ``get_historical_trades(symbol, fromId, limit)``
            that returns a polars DataFrame (None/empty when exhausted).
        symbol: trading pair, e.g. "BTCUSDT".
        on_day_cb: callback receiving one completed day's DataFrame.
        fromId: trade id to start fetching from (inclusive); ``None`` lets
            the feed apply its own default.
        limit: page size per request.
        sleep: seconds to pause between requests (crude rate limiting).
        is_agg: if True, aggregate trades sharing the same
            (datetime, price, side) before handing them to the callback.

    Note:
        The newest, still-incomplete day stays buffered and is never
        flushed; a later run resumes it via the checkpointed fromId.
    """
    def agg_trades(df):
        # Collapse trades that share timestamp/price/side into one row,
        # keeping the id range and the individual quantities.
        return df.group_by(["trading_date", "datetime", "symbol", "exchange", "price" ,"isBuyerMaker"]).agg(
            first_id = pl.col("id").first(),
            last_id = pl.col("id").last(),
            qty = pl.col("qty").sum(),
            qtyList = pl.col("qty"),
        )
    df = None
    while True:
        part = feed.get_historical_trades(symbol, fromId=fromId, limit=limit)
        if part is None or len(part) == 0:
            break

        df = part if df is None else df.vstack(part)

        # BUG FIX: the endpoint returns trades with id >= fromId, so
        # resuming at max(id) re-fetched (and duplicated) the last trade
        # on every page; start at the next id instead.
        fromId = df["id"].max() + 1

        # Flush every completed trading day; keep only the newest (possibly
        # still accumulating) day buffered in df.
        # BUG FIX: the original compared first/last datetime dates in a
        # `while` loop but never advanced firstTd after filtering, spinning
        # forever whenever a page spanned a day boundary.
        days = df["trading_date"].unique().sort().to_list()
        for day in days[:-1]:
            tdDf = df.filter(pl.col("trading_date") == day)
            on_day_cb(agg_trades(tdDf) if is_agg else tdDf)
        df = df.filter(pl.col("trading_date") == days[-1])

        if sleep > 0:
            time.sleep(sleep)

if __name__ == "__main__":
    feed = BinanceDataFeed(is_testNet=True, logLevel=logging.ERROR)

    # Parquet output root; one partition per exchange/trading_date/symbol.
    dataDir = Path("./data")
    dataDir = dataDir.joinpath("trade")
    dataDir.mkdir(parents=True, exist_ok=True)
    savedFile = "saved_trade.json"

    def on_day_cb(df):
        """Persist one completed day of trades and checkpoint progress."""
        df.write_parquet(dataDir.as_posix(), partition_by=["exchange","trading_date","symbol"])
        print(f"{symbol} save from {fromId}")
        # BUG FIX: checkpoint the NEXT id to fetch — the historicalTrades
        # endpoint is inclusive of fromId, so storing max(id) made every
        # restart re-download the last saved trade.
        saved[symbol] = int(df["id"].max()) + 1
        with open(savedFile, "w") as f:
            json.dump(saved, f)

    # Best-effort resume: start fresh when the checkpoint file is missing
    # or corrupt (narrowed from a blanket Exception catch).
    try:
        with open(savedFile, "r") as f:
            saved = json.load(f)
    except (FileNotFoundError, json.JSONDecodeError) as e:
        saved = {}
        print(e)

    info = feed.get_exchange_info()

    for i in tqdm(info["symbols"]):
        if i["quoteAsset"] != "USDT":
            continue
        symbol = i["symbol"]

        # Resume from the last checkpoint (0 = from the very first trade).
        fromId = saved.get(symbol,0)
        get_trades_day_by_day(feed, symbol, fromId=fromId, limit=1000, on_day_cb=on_day_cb, is_agg=False)

    print("done")
        
