from constVar import Binance
import requests
import datetime, time
import pytz
import pandas as pd
import pymysql
import os, json
from sqlalchemy import create_engine


# Timezone handles: Shanghai (UTC+8) for local display, UTC for reference.
tz_shanghai = pytz.timezone('Asia/Shanghai')
tz_utc = datetime.timezone.utc


# Load DB credentials from config.json located next to this module (read at import time).
# NOTE(review): db_config_live is re-assigned with a hard-coded dict near the
# bottom of this file, so the value loaded here from config.json is effectively
# ignored by the DB helpers at runtime — confirm which source is intended.
root_path = os.path.dirname(os.path.abspath(__file__))
with open(f"{root_path}/config.json") as fp:
    json_data = json.load(fp)
db_config_local = json_data["db_config_local"]
db_config_live = json_data["db_config_live"]


def timestamp_to_datetime_str(timestamp: int, timezone):
    """Format a millisecond epoch timestamp as 'YYYY-MM-DD HH:MM:SS' in *timezone*.

    :param timestamp: epoch time in milliseconds
    :param timezone: tzinfo object used for the conversion
    :return: formatted datetime string
    """
    moment = datetime.datetime.fromtimestamp(timestamp / 1000, tz=timezone)
    return moment.strftime('%Y-%m-%d %H:%M:%S')


def get_server_time() -> int:
    """Query Binance's server-time endpoint.

    :return: server time as a millisecond epoch timestamp
    """
    url = f"{Binance.base_url.value}{Binance.server_time_api.value}"
    payload = requests.get(url).json()
    return payload["serverTime"]


def get_exchange_info():
    """Fetch the Binance exchange-info endpoint.

    :return: parsed JSON response (exchange metadata incl. the "symbols" list)
    """
    url = f"{Binance.base_url.value}{Binance.exchange_info_url.value}"
    return requests.get(url).json()

def save_symbols_info(path="F:/crypto_data/binance/symbols_info.xlsx"):
    """Fetch exchange info from Binance and dump the symbols table to an Excel file.

    Fix: the output path was hard-coded to a Windows drive; it is now a
    parameter whose default preserves the original behavior. Also corrects
    the 'sybmols' local-variable typo.

    :param path: destination .xlsx path (default kept for backward compatibility)
    """
    symbols = get_exchange_info()["symbols"]
    symbols_df = pd.DataFrame(symbols)
    symbols_df.to_excel(path, index=False)


def get_depth(symbol="BTCUSDT", limit=500):
    """Fetch the order book for *symbol* (top *limit* levels), print and return it.

    Fix: the function previously only printed and returned None; it now also
    returns the parsed JSON, consistent with get_agg_trades(). Callers that
    ignored the return value are unaffected.

    :param symbol: trading pair, e.g. "BTCUSDT"
    :param limit: number of price levels to fetch
    :return: parsed JSON order-book payload
    """
    url = (f"{Binance.base_url.value}{Binance.depth_url.value}"
           .replace("SYMBOL", symbol).replace("LIMIT", str(limit)))
    payload = requests.get(url).json()
    print(payload)
    return payload


def get_trades(symbol="BTCUSDT", limit=500):
    """Fetch recent trades for *symbol*, print and return them.

    Fix: the function previously only printed and returned None; it now also
    returns the parsed JSON, consistent with get_agg_trades(). Callers that
    ignored the return value are unaffected.

    :param symbol: trading pair, e.g. "BTCUSDT"
    :param limit: number of trades to fetch
    :return: parsed JSON list of recent trades
    """
    url = (f"{Binance.base_url.value}{Binance.trades_url.value}"
           .replace("SYMBOL", symbol).replace("LIMIT", str(limit)))
    payload = requests.get(url).json()
    print(payload)
    return payload


def add_datetime_str(raw_json, time_column, timezone=tz_shanghai):
    """Add a human-readable 'datetime_str' field to each record (in place).

    Fix: the function previously ended with a bare `return` (None), although
    get_historical_trades() prints its return value; it now returns the
    mutated list so the call can be chained. The in-place mutation is
    unchanged, so existing callers keep working.

    :param raw_json: list of dicts, each holding a millisecond epoch
        timestamp under *time_column*
    :param time_column: key of the millisecond timestamp in each record
    :param timezone: tzinfo used for formatting (default: Shanghai)
    :return: the same list, with 'datetime_str' added to every record
    """
    for record in raw_json:
        seconds = record[time_column] / 1000
        record["datetime_str"] = datetime.datetime.fromtimestamp(
            seconds, tz=timezone).strftime('%Y-%m-%d %H:%M:%S')
    return raw_json


def get_historical_trades(symbol="BTCUSDT", limit=500):
    """Fetch historical trades for *symbol* and print them with datetime strings.

    Known issue (kept from the original): the endpoint replies
    {'code': -2014, 'msg': 'API-key format invalid.'} — cause unknown
    (presumably the endpoint requires an API key that is not sent here —
    TODO confirm against Binance docs).

    Fixes: the response body is parsed once instead of twice, and the second
    print now shows the annotated records instead of add_datetime_str()'s
    former None return value.

    :param symbol: trading pair, e.g. "BTCUSDT"
    :param limit: number of trades to fetch
    :return: None (results are printed)
    """
    url = (f"{Binance.base_url.value}{Binance.historical_trades_url.value}"
           .replace("SYMBOL", symbol).replace("LIMIT", str(limit)))
    trades = requests.get(url).json()
    print(trades)
    add_datetime_str(trades, "time")
    print(trades)


def get_agg_trades(symbol="BTCUSDT", limit=500, endTime=None):
    """
    Fetch compressed/aggregate trades from Binance.

    Notes (from the Binance API docs):
    - may reply {'code': -2014, 'msg': 'API-key format invalid.'}
    - only the most recent 1 year of trade data can be queried
    - if both startTime and endTime are sent, the interval must be under one hour
    - with no filter parameters (fromId, startTime, endTime), the most recent
      trades are returned
    - insurance-fund and auto-deleveraging (ADL) fills are not order-book
      trades, so they are not aggregated
    - sending startTime/endTime together with fromId may time out; send either
      fromId alone, or startTime and endTime alone
    :param symbol: trading pair, e.g. "BTCUSDT"
    :param limit: default 500; max 1000
    :param endTime: optional millisecond epoch timestamp; appended to the URL
        as &endTime=... (assumes the URL template already contains a query string)
    :return: parsed JSON list of aggregate trades
    """
    url = f"{Binance.base_url.value}{Binance.agg_trades_url.value}".replace("SYMBOL", symbol).replace("LIMIT", str(limit))
    if endTime is not None:
        url = f"{url}&endTime={endTime}"
    response = requests.get(url)
    return response.json()



def read_sql(sql, db_name):
    """Run a query against database *db_name* on the live server.

    Fix: the connection was never closed (leak); it is now released in a
    finally block even if the query raises.

    :param sql: SELECT statement to execute
    :param db_name: schema name to connect to
    :return: pandas.DataFrame with the query result
    """
    conn = pymysql.connect(db=db_name, **db_config_live)
    try:
        return pd.read_sql(sql, conn)
    finally:
        conn.close()


def excute_sql(sql, db_name):
    """Execute a write statement (INSERT/UPDATE/DELETE/DDL) and commit.

    Fix: cursor and connection are now released even when execute() raises
    (previously a failure leaked both). Function name typo ('excute') kept to
    preserve the public interface.

    :param sql: statement to execute
    :param db_name: schema name to connect to
    """
    conn = pymysql.connect(db=db_name, **db_config_live)
    try:
        with conn.cursor() as cursor:
            rows = cursor.execute(sql)
        conn.commit()
    finally:
        conn.close()
    print(f"{rows}行更新")


def df_into_db(df, db_name, table_name):
    """Insert every row of *df* into *table_name* of database *db_name*.

    inf values are replaced by NaN (with a warning print); NaN/None values are
    converted to Python None so the driver writes SQL NULL.
    NOTE: *df* is mutated in place when inf values are present (original
    behavior, kept for compatibility).

    Fixes: the connection is now closed even when executemany/commit raises,
    and the row tuples are materialized once instead of twice (previously
    `data` was built and then rebuilt as `cleaned_data`).

    :param df: DataFrame whose columns match the target table's columns
    :param db_name: schema name to connect to
    :param table_name: target table name
    """
    import numpy as np
    inf_df = df[df.isin([np.inf, -np.inf]).any(axis=1)]
    if not inf_df.empty:
        print(f"有{len(inf_df)}行存在inf值, please check")
        df.replace([float("inf"), float("-inf")], np.nan, inplace=True)
    null_df = df[df.isna().any(axis=1)]
    if not null_df.empty:
        print(f"有{len(null_df)}行存在空值, please check")
    columns_str = ",".join([f'`{col}`' for col in df.columns])
    place_holder = ",".join(["%s"] * len(df.columns))
    sql = f"insert into {table_name} ({columns_str}) values ({place_holder})"
    # Convert np.nan to None in one pass; the database stores it as NULL.
    data = [
        tuple(None if pd.isna(x) else x for x in row)
        for row in df.values
    ]
    conn = pymysql.connect(db=db_name, **db_config_live)
    try:
        with conn.cursor() as cursor:
            cursor.executemany(sql, data)
        conn.commit()
    finally:
        conn.close()
    print(f"{datetime.datetime.now()} table:{table_name}, {len(data)} rows inserted")


def batch_insert_dataframe(df: pd.DataFrame, db_name: str, table_name: str, batch_size: int = 10000):
    """
    Insert a DataFrame into MySQL in batches.

    Args:
        df: DataFrame to insert (columns must match the target table)
        db_name: target database (schema) name
        table_name: target table name
        batch_size: number of rows per INSERT batch
    """

    total_rows = len(df)
    inserted_rows = 0
    start_time = time.time()

    # Build the column list and a matching %s placeholder string.
    # NOTE(review): column names are not backtick-quoted here (unlike
    # df_into_db) — names clashing with SQL keywords would fail.
    columns = list(df.columns)
    placeholders = ', '.join(['%s'] * len(columns))
    column_names = ', '.join(columns)

    insert_sql = f"INSERT INTO {table_name} ({column_names}) VALUES ({placeholders})"

    connection = pymysql.connect(db=db_name, **db_config_live)

    try:
        with connection.cursor() as cursor:
            # Relax session-level checks to speed up the bulk insert
            # (session-scoped, so they die with the connection anyway).
            cursor.execute("SET autocommit = 0")
            cursor.execute("SET unique_checks = 0")
            cursor.execute("SET foreign_key_checks = 0")

            # Insert batch by batch, committing after each batch.
            for i in range(0, total_rows, batch_size):
                batch_df = df.iloc[i:i + batch_size]
                batch_data = [tuple(row) for row in batch_df.itertuples(index=False)]

                try:
                    cursor.executemany(insert_sql, batch_data)
                    connection.commit()

                    inserted_rows += len(batch_data)
                    elapsed = time.time() - start_time

                    # Progress report every 50k rows and at the end.
                    if inserted_rows % 50000 == 0 or inserted_rows == total_rows:
                        progress = inserted_rows / total_rows * 100
                        speed = inserted_rows / elapsed
                        print(f"{datetime.datetime.now()} 进度: {inserted_rows}/{total_rows} ({progress:.1f}%) "
                              f"速度: {speed:.0f} 行/秒")

                except Exception as e:
                    connection.rollback()
                    print(f"批次 {i // batch_size + 1} 插入失败: {e}")
                    # Deliberate best-effort: skip the failed batch and continue.
                    continue

            # Restore session settings.
            # NOTE(review): skipped if an exception escapes the loop; harmless
            # here because the settings are session-scoped and the connection
            # is closed in the finally block — confirm if pooled connections
            # are ever introduced.
            cursor.execute("SET unique_checks = 1")
            cursor.execute("SET foreign_key_checks = 1")
            cursor.execute("SET autocommit = 1")

    except Exception as e:
        print(f"插入过程发生错误: {e}")
        raise
    finally:
        connection.close()

    total_time = time.time() - start_time
    print(f"✅ 插入完成! 总计 {inserted_rows} 行, 耗时 {total_time:.1f} 秒")
    print(f"平均速度: {inserted_rows / total_time:.0f} 行/秒")


def save_to_mysql(df, db_name, table_name):
    """
    Store a DataFrame into MySQL via SQLAlchemy (table is dropped and
    re-created because of if_exists='replace').

    Fix: the original aliased the module-level db_config_live dict and then
    added 'database' and 'password' keys to it, silently mutating shared
    global state for every later caller; it now works on a copy.

    :param df: DataFrame to store
    :param db_name: target database name
    :param table_name: target table name
    """
    try:
        db_config = dict(db_config_live)  # copy: never mutate the global config
        db_config['database'] = db_name
        db_config['password'] = db_config['passwd']
        # Build a SQLAlchemy engine from the connection parameters.
        engine = create_engine(
            f"mysql+pymysql://{db_config['user']}:{db_config['password']}@"
            f"{db_config['host']}:{db_config['port']}/{db_config['database']}"
        )

        # Write the data in chunks.
        df.to_sql(
            name=table_name,
            con=engine,
            if_exists='replace',  # replace the table if it already exists
            index=False,
            chunksize=1000  # batch inserts
        )
        print(f"成功创建表 {table_name} 并存储数据到MySQL")
    except Exception as e:
        print(f"MySQL错误: {e}")


# Usage example configs.
# NOTE(review): this assignment OVERWRITES the db_config_live that was loaded
# from config.json at the top of the file — every DB helper actually uses the
# hard-coded credentials below. Confirm which source is intended; hard-coded
# credentials in source control are also a security risk.
db_config_live= {
    "charset": "utf8",
    "host": "192.168.0.114",
    "port": 3307,
    "user": "admin",
    "passwd": "123456"
  }
# Sample local-MySQL config; not referenced anywhere in this file.
mysql_config = {
    'host': 'localhost',
    'user': 'root',
    'password': 'yourpassword',
    'database': 'test_db',
    'port': 3306
}


if __name__ == '__main__':
    # Ad-hoc manual smoke tests — uncomment the call you want to exercise.
    # print("上海时间", timestamp_to_datetime_str(get_server_time(), tz_shanghai))
    # print("utc时间", timestamp_to_datetime_str(get_server_time(), tz_utc))

    # sybmols = get_exchange_info()["symbols"]
    # for symbol in sybmols:
    #     if symbol["symbol"] == "BTCUSDT":
    #         print(symbol)

    get_depth(limit=5)
    # get_trades(limit=10)
    # get_historical_trades(limit=10)
    # get_agg_trades(limit=10)