import multiprocessing
import datetime
import requests
import time
import pandas as pd
from datetime import timezone
import json
from logger import logger
import os
import shutil


def timestamp_to_utc_str(timestamp, fmt="%Y-%m-%d %H:%M:%S"):
    """Format a unix timestamp (seconds) as a UTC time string.

    :param timestamp: seconds since the epoch (int or float)
    :param fmt: strftime format for the result
    :return: the timestamp rendered in UTC per *fmt*
    """
    return datetime.datetime.fromtimestamp(timestamp, tz=timezone.utc).strftime(fmt)


def utc_str_to_timestamp(str_time):
    """Parse a ``"%Y-%m-%d %H:%M:%S"`` string, interpreted as UTC, into a
    unix timestamp in whole seconds.

    Example input: ``"2013-01-01 00:00:00"``.
    """
    parsed = datetime.datetime.strptime(str_time, "%Y-%m-%d %H:%M:%S")
    return int(parsed.replace(tzinfo=timezone.utc).timestamp())


def get_kline_from_binance_with_vol(symbol, end_time, frequency="1h"):
    """Download the full spot kline history of *symbol* from Binance and save
    it as a CSV under ``G:/binance_k_line/v1/{frequency}/``.

    :param symbol: Binance pair symbol, e.g. "BTCUSDT"
    :param end_time: inclusive cutoff, unix timestamp in milliseconds
    :param frequency: candle interval, one of "1h", "5m", "1d"
    """
    print(symbol)
    assert frequency in ["1h", "5m", "1d"]
    start_time = 1451606400 * 1000  # UTC 2016-01-01 00:00:00, in milliseconds
    limit = 1000  # Binance returns at most 1000 candles per request
    all_datas = []
    while True:
        print(datetime.datetime.fromtimestamp(start_time / 1000))
        try:
            url = f"https://api.binance.com/api/v3/klines?symbol={symbol}&interval={frequency}&startTime={start_time}&limit={limit}"
            # timeout so a stuck connection cannot hang a pool worker forever
            response = requests.get(url, timeout=30)
        except Exception as e:
            print(e)
            time.sleep(10)
            continue
        if response.status_code != 200:
            # back off before retrying; the old bare `continue` busy-looped
            # and hammered the API on persistent HTTP errors
            time.sleep(10)
            continue
        data = response.json()
        if not data:
            # no candles at/after start_time (e.g. freshly listed symbol) —
            # the old code crashed here on data[-1]
            break
        # keep open-time + OHLCV (0..5) and the extra volume fields (7..10),
        # dropping close-time (6) and the unused "ignore" field (11)
        data = [x[:6] + x[7:11] for x in data]
        # advance past the last open time so the next page does not refetch it
        start_time = data[-1][0] + 1
        all_datas.extend(data)
        if len(data) < limit:
            break
    all_df = pd.DataFrame(all_datas, columns=["datetime", "open", "high", "low", "close", "vol", "amount",
                                              "number_of_trades", "active_vol", "active_amount"])
    all_df.rename(columns={"datetime": "timestamp"}, inplace=True)
    all_df["datetime"] = all_df["timestamp"].apply(lambda x: timestamp_to_utc_str(x/1000))
    all_df.drop_duplicates(subset=["timestamp"], keep="last", inplace=True)
    all_df["symbol"] = symbol
    all_df["datasource"] = "binance"
    all_df["frequency"] = frequency
    all_df["type"] = "spot"
    all_df = all_df[all_df["timestamp"] <= end_time]
    all_df.to_csv(f"G:/binance_k_line/v1/{frequency}/binance_{symbol}_kline_{frequency}.csv", index=False)


def get_all_coins():
    """Return the symbols of all Binance spot pairs currently in TRADING status.

    :return: list of pair symbols, e.g. ["BTCUSDT", "ETHUSDT", ...]
    :raises requests.HTTPError: if the exchangeInfo endpoint returns an error
    """
    url = "https://api.binance.com/api/v3/exchangeInfo"
    # timeout so the call cannot hang; fail loudly on HTTP errors instead of
    # crashing later with a confusing JSON/KeyError
    resp = requests.get(url, timeout=30)
    resp.raise_for_status()
    symbols = resp.json()['symbols']
    print(f"所有币种数量:{len(symbols)}")
    # only pairs that are actively trading; the symbol string is what the
    # kline endpoint expects (baseAsset + quoteAsset)
    trading = [x['symbol'] for x in symbols if x['status'] == 'TRADING']
    print(f"正常交易的币种数量:{len(trading)}")
    return trading


def get_exist_symbols(frequency, base_dir="G:/binance_k_line/v1"):
    """Return the symbols that already have a downloaded kline CSV.

    Filenames follow ``binance_{symbol}_kline_{frequency}.csv``, so the
    symbol is the second underscore-separated field.

    :param frequency: candle interval subdirectory, e.g. "5m"
    :param base_dir: root directory containing one folder per frequency
                     (parameterized from the old hard-coded G: path)
    :return: list of symbols already present on disk
    """
    files = os.listdir(f"{base_dir}/{frequency}")
    return [name.split("_")[1] for name in files]


def move_folder_contents(source_folder, dest_folder):
    """Move every item (file or subdirectory) from *source_folder* into
    *dest_folder*, creating the destination if needed.

    NOTE(review): the previous version had every `shutil.move` commented out
    (the two commented branches were identical anyway) yet still printed a
    success message — it moved nothing. The move is now actually performed.

    :param source_folder: directory whose contents are moved
    :param dest_folder: target directory (created if missing)
    """
    os.makedirs(dest_folder, exist_ok=True)
    # snapshot the listing first so we never iterate a directory we are
    # mutating
    path_list = []
    for item in os.listdir(source_folder):
        source_path = os.path.join(source_folder, item)
        dest_path = os.path.join(dest_folder, item)
        path_list.append((source_path, dest_path))
    for source_path, dest_path in path_list:
        print(source_path, dest_path)
        # shutil.move handles both files and directories
        shutil.move(source_path, dest_path)

    print(f"所有内容已从 {source_folder} 移动到 {dest_folder}")


def merge_v1_v2():
    """Merge the ``v1`` and ``v2`` CSV trees of the most recent dated
    snapshot folder into a fresh dated folder (next calendar day) under
    ``v1``, one concatenated CSV per file and frequency.
    """
    path = f"G:/binance_k_line/"
    # the newest snapshot folder name sorts last (YYYYMMDD); the merged
    # output goes into the following day's folder
    max_dir = max(os.listdir(path))
    next_dir = (datetime.datetime.strptime(max_dir, "%Y%m%d") + datetime.timedelta(days=1)).strftime("%Y%m%d")
    print(f"max_dir:{max_dir},next_dir:{next_dir}")
    for frequency in ["1d", "1h", "5m"]:
        os.makedirs(f"{path}/{next_dir}/v1/{frequency}", exist_ok=True)
        files1 = os.listdir(f"{path}/{max_dir}/v1/{frequency}")
        files2 = os.listdir(f"{path}/{max_dir}/v2/{frequency}")
        all_files = set(files1) | set(files2)
        print(f"frequency:{frequency},files1长度:{len(files1)},files2长度:{len(files2)},全部长度:{len(all_files)}")
        for file in all_files:
            merged = pd.DataFrame()
            # v1 first, then v2 — same concatenation order as before
            for version, present in (("v1", files1), ("v2", files2)):
                if file not in present:
                    continue
                part = pd.read_csv(f"{path}/{max_dir}/{version}/{frequency}/{file}")
                print(f"file:{file},{version}长度:{len(part)}")
                merged = pd.concat([merged, part], ignore_index=True)
            print(f"file:{file},all长度:{len(merged)}")
            merged.to_csv(f"{path}/{next_dir}/v1/{frequency}/{file}", index=False)


if __name__ == '__main__':
    frequency = "5m"
    # cut off at 00:00:00 UTC two days ago so only fully closed days are kept
    cutoff_day = datetime.date.today() - datetime.timedelta(days=2)
    end_timestamp = utc_str_to_timestamp(cutoff_day.strftime('%Y-%m-%d %H:%M:%S')) * 1000
    # skip symbols whose CSV already exists on disk
    pending = set(get_all_coins()) - set(get_exist_symbols(frequency))
    params = [(symbol, end_timestamp, frequency) for symbol in pending]
    logger.info(f"开始从binance获取{frequency}的k线数据")
    with multiprocessing.Pool(60) as pool:
        pool.starmap(get_kline_from_binance_with_vol, params)
    logger.info(f"成功从binance获取{frequency}的k线数据")
