import datetime
import hashlib
import json
import multiprocessing
import os
import time
from datetime import timezone

import pandas as pd
import requests

from logger import logger
from utils import read_sql, excute_sql, df_into_db


def timestamp_to_utc_str(timestamp, fmt="%Y-%m-%d %H:%M:%S"):
    """Convert a Unix timestamp (in seconds) to a UTC time string.

    :param timestamp: seconds since the epoch
    :param fmt: strftime format for the output string
    :return: the timestamp rendered in UTC using *fmt*
    """
    as_utc = datetime.datetime.fromtimestamp(timestamp, tz=timezone.utc)
    return as_utc.strftime(fmt)


def utc_str_to_timestamp(str_time):
    """Parse a "YYYY-MM-DD HH:MM:SS" string as UTC and return its Unix timestamp.

    :param str_time: e.g. "2013-01-01 00:00:00", interpreted as UTC
    :return: integer seconds since the epoch
    """
    parsed = datetime.datetime.strptime(str_time, "%Y-%m-%d %H:%M:%S")
    return int(parsed.replace(tzinfo=timezone.utc).timestamp())


def get_latest_time(symbol, frequency):
    """Return the newest saved kline timestamp for *symbol*, or None.

    Looks in the v1 CSV store; returns None when the file is missing or empty.
    NOTE(review): this reads from .../v1/ while the downloader writes to
    .../v2/ — looks like a one-off v1 -> v2 migration; confirm before reuse.
    """
    path = f"G:/binance_k_line/v1/{frequency}/binance_{symbol}_kline_{frequency}.csv"
    if not os.path.exists(path):
        return None
    saved = pd.read_csv(path)
    if saved.empty:
        return None
    return saved["timestamp"].max()


def update_kline_from_binance_with_vol(symbol, end_time, frequency="1h"):
    """
    Incrementally download Binance spot klines for *symbol* and save them to
    the v2 CSV store.

    :param symbol: Binance trading pair, e.g. "BTCUSDT"
    :param end_time: inclusive upper bound for kline open times, in milliseconds
    :param frequency: kline interval, one of "1h", "5m", "1d"
    """
    print(symbol)
    assert frequency in ["1h", "5m", "1d"]
    latest_time = get_latest_time(symbol, frequency)
    if latest_time is None:
        start_time = 1451606400 * 1000  # UTC: 2016-01-01 00:00:00
    else:
        start_time = latest_time
    all_datas = []
    limit = 1000  # Binance returns at most 1000 klines per request
    while True:
        print(datetime.datetime.fromtimestamp(start_time / 1000))
        try:
            url = f"https://api.binance.com/api/v3/klines?symbol={symbol}&interval={frequency}&startTime={start_time}&limit={limit}"
            response = requests.get(url)
        except Exception as e:
            print(e)
            time.sleep(10)
            continue
        if response.status_code != 200:
            # Back off before retrying instead of hammering the API in a
            # tight loop (a tight retry loop risks an IP ban).
            time.sleep(10)
            continue
        data = response.json()
        if not data:
            # No klines at or after start_time -- nothing left to fetch.
            break
        # Keep open time, OHLC, volume (x[:6]) plus quote volume, trade count
        # and taker-buy stats (x[7:11]); drop close time and the unused field.
        data = [x[:6] + x[7:11] for x in data]
        start_time = data[-1][0]
        all_datas.extend(data)
        if len(data) < limit:
            break
    all_df = pd.DataFrame(all_datas, columns=["datetime", "open", "high", "low", "close", "vol", "amount",
                                              "number_of_trades", "active_vol", "active_amount"])
    all_df.rename(columns={"datetime": "timestamp"}, inplace=True)
    all_df["datetime"] = all_df["timestamp"].apply(lambda x: timestamp_to_utc_str(x/1000))
    # Each page restarts at the previous page's last open time, so that kline
    # is fetched twice; keep the later copy.
    all_df.drop_duplicates(subset=["timestamp"], keep="last", inplace=True)
    all_df["symbol"] = symbol
    all_df["datasource"] = "binance"
    all_df["frequency"] = frequency
    all_df["type"] = "spot"
    all_df = all_df[all_df["timestamp"] <= end_time]
    if latest_time is not None:
        # Only drop already-saved rows when resuming; comparing the column
        # against None (fresh start) would raise a TypeError.
        all_df = all_df[all_df["timestamp"] > latest_time]
    all_df.to_csv(f"G:/binance_k_line/v2/{frequency}/binance_{symbol}_kline_{frequency}.csv", index=False)


def get_exist_coins_info():
    """Load the whole binance_symbol_info table as a DataFrame."""
    return read_sql("select * from binance_symbol_info", db_name="binance")

def get_exist_symbols(frequency):
    """Return the symbols that already have a v2 CSV for *frequency*."""
    directory = f"G:/binance_k_line/v2/{frequency}"
    # File names look like "binance_<SYMBOL>_kline_<frequency>.csv",
    # so the symbol is the second underscore-separated token.
    return [name.split("_")[1] for name in os.listdir(directory)]

def update_symbol_info():
    info_df_all = get_exist_coins_info()
    info_df = info_df_all[info_df_all["trading"] == 1]
    url = "https://api.binance.com/api/v3/exchangeInfo"
    req = requests.get(url)
    all_data = json.loads(req.content)
    symbols = all_data['symbols']
    print(f"所有币种数量:{len(symbols)}")
    symbols = [(x['symbol'], x['baseAsset'], x['quoteAsset']) for x in symbols if x['status'] == 'TRADING']
    symbols_df = pd.DataFrame(symbols, columns=["symbol", "base_asset", "quote_asset"])
    symbols_df["one_hour_hash"] = symbols_df["symbol"].apply(lambda x: hash(x) % 10 + 1)
    symbols_df["five_minute_hash"] = symbols_df["symbol"].apply(lambda x: hash(x) % 100 + 1)
    inner_symbols = set(symbols_df["symbol"]) & set(info_df["symbol"])
    add_symbols = set(symbols_df["symbol"]) - set(info_df["symbol"])
    minus_symbols = set(info_df["symbol"]) - set(symbols_df["symbol"])
    inner_df1 = info_df[info_df["symbol"].isin(inner_symbols)].sort_values(by="symbol")
    inner_df2 = symbols_df[symbols_df["symbol"].isin(inner_symbols)].sort_values(by="symbol")
    inner_df1.equals(inner_df2)
    if minus_symbols:
        minus_sql = f"update binance_symbol_info set trading = 0 where symbol in {tuple(minus_symbols)}"
        excute_sql(minus_sql, db_name="binance")
    if add_symbols:
        assert not add_symbols.isdisjoint(set(info_df_all["symbol"])), "防止停止交易的symbol又开始交易" 
        add_df = symbols_df[symbols_df["symbol"].isin(add_symbols)]
        df_into_db(add_df, db_name="binance", table_name="binance_symbol_info")
    return symbols_df["symbol"].tolist()


if __name__ == '__main__':
    all_symbols = update_symbol_info()
    # Stop two days short of today so only fully closed klines are stored;
    # date.strftime fills %H:%M:%S with 00:00:00.
    cutoff_date = datetime.date.today() - datetime.timedelta(days=2)
    end_timestamp = utc_str_to_timestamp(cutoff_date.strftime('%Y-%m-%d %H:%M:%S')) * 1000
    frequency = "1d"
    logger.info(f"开始从binance获取{frequency}的k线数据")
    exist_symbols = get_exist_symbols(frequency)
    logger.info(f"exist_symbols={exist_symbols}")
    # Only fetch symbols that do not already have a saved CSV.
    params = [(sym, end_timestamp, frequency) for sym in all_symbols if sym not in exist_symbols]
    if frequency == "1d":
        # Daily data is small enough to fetch serially.
        for task in params:
            update_kline_from_binance_with_vol(*task)
    else:
        with multiprocessing.Pool(60) as pool:
            pool.starmap(update_kline_from_binance_with_vol, params)
    logger.info(f"成功从binance获取{frequency}的k线数据")
