import os
import time
from datetime import datetime as datetime_

import numpy as np
import pandas as pd
import requests

from core.constant import *
from tools.framework import load_module_from_path
from tools.view import info_messagebox


def gen_gx_symbol(code: str, exchange: Exchange = None):
    """Map a bare security code to an exchange-qualified symbol.

    Codes beginning with '6' are Shanghai listings (suffix ``.SH``);
    codes beginning with '0' or '3' are Shenzhen listings (suffix
    ``.SZ``); codes whose first character is a lowercase letter are
    returned unchanged (presumably already full symbols — confirm).
    Any other leading character is rejected.

    Raises:
        ValueError: if the code's leading character is unrecognised.
    """
    head = code[0]
    if head == '6':
        return f"{code}.SH"
    if head in ('0', '3'):
        return f"{code}.SZ"
    if head.islower():
        return code
    raise ValueError("未收录的代码类型。")


def download_okx(symbol: str, k_interval: str, start_time: datetime_, end_time: datetime_, file_manager):
    """Download candlestick history for *symbol* from OKX and save it as a CSV.

    Pages backwards through ``/api/v5/market/history-candles`` (100 rows per
    request), tolerating up to 50 errors over the whole download, then sorts
    the collected rows by timestamp and hands the DataFrame to
    *file_manager* for persistence.

    Args:
        symbol: OKX instrument id (passed as ``instId``), e.g. "BTC-USDT".
        k_interval: OKX bar-size string (passed as ``bar``), e.g. "1m", "1H".
        start_time: lower bound of the requested range (naive datetime).
        end_time: upper bound of the requested range (naive datetime).
        file_manager: project object providing ``log_engine``,
            ``market_data_path`` and ``save_csv`` — assumed, confirm contract.
    """
    file_manager.log_engine.emit("开始收集下载数据所需参数。", LogName.Running)
    # Confirm-button click callback (original note).
    # OKX expects millisecond epoch timestamps, serialized as strings.
    start_time_timestamp = str(int(datetime_.timestamp(start_time) * 1000))
    end_time_timestamp = str(int(datetime_.timestamp(end_time) * 1000))
    # Assemble the request URL.
    REST_HOST: str = "https://www.okx.com"
    path = "/api/v5/market/history-candles"
    url = REST_HOST + path

    # Initialise the accumulating DataFrame; ori_res_df_ls holds 5000-row
    # chunks that are re-joined after the download loop finishes.
    columns = ['datetime', 'open', 'high', 'low', 'close', 'volume']
    ori_res_df_ls = []
    ori_res_df = pd.DataFrame([], columns=columns)
    # retry_num = int(retry_time)
    retry_num = 50  # total error budget for the whole download
    time_interval = 0.5  # seconds to sleep before each retry
    file_manager.log_engine.emit("下载数据所需参数收集完成。", LogName.Running)
    # NOTE(review): retry_num0 is never reset after a successful page, so the
    # budget is cumulative across the download — confirm that is intended.
    retry_num0 = 0
    file_manager.log_engine.emit("开始下载数据。", LogName.Running)
    file_manager.log_engine.emit("注：线程不支持单独关闭，\n如需终止下载，需关闭进程（本软件）！",
                                         LogName.Running)
    file_manager.log_engine.emit(
        f"数据获取至{datetime_.fromtimestamp(int(end_time_timestamp) / 1000).strftime('%Y-%m-%d %H:%M')}",
        LogName.Running)
    while True:
        # Build the page request; `before`/`after` bound the page in ms epoch.
        params: dict = {"instId": symbol,
                        "bar": k_interval,
                        "limit": "100",
                        "before": start_time_timestamp,
                        "after": end_time_timestamp}

        # Fetch one page from the server.
        try:
            # file_manager.log_engine.emit("向服务器发出请求", LogName.Running)
            resp: requests.Response = requests.get(url, params=params)
            # file_manager.log_engine.emit(f"成功获取数据包，{resp.json()['data']}", LogName.Running)
        except Exception as e:
            # Network/transport failure: retry until the budget is exhausted.
            file_manager.log_engine.emit("数据获取失败", LogName.Running)
            retry_num0 += 1
            if retry_num0 <= retry_num:
                file_manager.log_engine.emit(f"获取数据错误：{e},retry num: {retry_num0}", LogName.Running)
                time.sleep(time_interval)
                continue
            else:
                file_manager.log_engine.emit(f"获取数据错误：{e},retry num: {retry_num0}", LogName.Running)
                file_manager.log_engine.emit(f"错误次数超限，本次下载失败，请检查网络！已下载的数据将正常保存。", LogName.Running)
                break
        # A well-formed OKX payload always carries a "code" field.
        if "code" not in resp.json():
            retry_num0 += 1
            e = "响应数据不含code字段"
            if retry_num0 <= retry_num:
                file_manager.log_engine.emit(f"获取数据错误：{e},retry num: {retry_num0}", LogName.Running)
                time.sleep(time_interval)
                continue
            else:
                file_manager.log_engine.emit(f"获取数据错误：{e},retry num: {retry_num0}", LogName.Running)
                file_manager.log_engine.emit(f"错误次数超限，本次下载失败，请检查网络！已下载的数据将正常保存。", LogName.Running)
                break

        # Non-zero code means OKX rejected the request; retry as above.
        if resp.json()["code"] != "0":
            retry_num0 += 1
            if retry_num0 <= retry_num:
                file_manager.log_engine.emit(
                    f"获取数据错误：错误码{int(resp.json()['code'])}, 信息{resp.json()['msg']}, retry num: {retry_num0}",
                    LogName.Running)
                time.sleep(time_interval)
                continue
            else:
                file_manager.log_engine.emit(
                    f"获取数据错误：错误码{int(resp.json()['code'])}, 信息{resp.json()['msg']}, retry num: {retry_num0}",
                    LogName.Running)
                file_manager.log_engine.emit(f"错误次数超限，本次下载失败，请检查网络！已下载的数据将正常保存。",
                                             LogName.Running)
                break
        # Append this page's rows. data_one[:-3] keeps the first six fields to
        # match `columns` — presumably dropping volCcy/volCcyQuote/confirm,
        # verify against the OKX candle schema.
        res_ls = []
        for data_one in resp.json()["data"]:
            res_ls.append(data_one[:-3])

        ori_res_next_df = pd.DataFrame(res_ls, columns=columns)
        ori_res_next_df.fillna(0, inplace=True)
        ori_res_df = pd.concat([ori_res_df, ori_res_next_df], axis=0)
        # Once ori_res_df exceeds 5000 rows, stash it and start a fresh
        # DataFrame; the chunks are concatenated again after the loop.
        if len(ori_res_df) >= 5000:
            file_manager.log_engine.emit(
                f"数据获取至{datetime_.fromtimestamp(int(end_time_timestamp) / 1000).strftime('%Y-%m-%d %H:%M')}",
                LogName.Running)
            ori_res_df_ls.append(ori_res_df)
            ori_res_df = pd.DataFrame([], columns=columns)

        # A full page (100 rows) means older data may remain; page backwards.
        if len(resp.json()["data"]) == 100:
            # res_ls[-1][0] is the oldest timestamp on this page (OKX returns
            # newest-first). NOTE(review): adding 1000 ms makes the next
            # `after` bound re-include that oldest candle, which looks like it
            # duplicates one row per page for bars longer than 1s — confirm
            # whether duplicates are deduplicated downstream.
            end_time_timestamp = int(res_ls[-1][0]) + 1000
        else:
            ori_res_df_ls.append(ori_res_df)
            break
    # Concatenate the stashed 5000-row chunks into one DataFrame.
    for i in range(len(ori_res_df_ls)):
        if i == 0:
            ori_res_df = ori_res_df_ls[i]
        else:
            ori_res_df = pd.concat([ori_res_df, ori_res_df_ls[i]], axis=0)
    # Sort rows chronologically by the numeric millisecond timestamp.
    ori_res_df["datetime"] = pd.to_numeric(ori_res_df["datetime"])
    ori_res_df.sort_values('datetime', inplace=True)
    # Pick a timestamp format matching the bar granularity (note: lowercase
    # 'm' matches minute bars; OKX month bars use uppercase 'M' and would
    # fall through to the date-only default — presumably acceptable, confirm).
    if 's' in k_interval:
        str_format = "%Y-%m-%d %H:%M:%S"
    elif 'm' in k_interval:
        str_format = "%Y-%m-%d %H:%M"
    elif 'H' in k_interval:
        str_format = "%Y-%m-%d %H"
    elif 'D' in k_interval:
        str_format = "%Y-%m-%d"
    else:
        str_format = "%Y-%m-%d"
    # Render timestamps as strings and use the datetime column as the index.
    ori_res_df["datetime"] = ori_res_df["datetime"].apply(
        lambda x: datetime_.fromtimestamp(x / 1000).strftime(str_format))
    ori_res_df.set_index("datetime", inplace=True)
    # Last index entry = actual end of the downloaded data (may differ from
    # the requested end_time when the download stopped early).
    end_time_real = ori_res_df.index[-1]
    # Persist: build a filesystem-safe file name (spaces -> '_', ':' -> '=').
    csv_name = f"{DataCategory.Mk.value}_{symbol}_{start_time}_{end_time_real}_{k_interval}.csv"
    csv_name = csv_name.replace(' ', '_')
    csv_name = csv_name.replace(':', '=')
    mk_data_folder = file_manager.market_data_path
    csv_path = os.path.join(mk_data_folder, csv_name)
    # ori_res_df.to_csv(csv_path)
    # Saving is delegated to file_manager.
    file_manager.save_csv(ori_res_df, csv_path)
    file_manager.log_engine.emit(f"成功：{csv_name}已保存至{mk_data_folder}", LogName.Running)


def gen_factor_val(data_dc: dict, factor_ls, file_manager):
    """Append one factor-value column per factor to each symbol's DataFrame.

    Args:
        data_dc: mapping of symbol -> DataFrame with 'open'/'high'/'low'/
            'close'/'volume' columns.
        factor_ls: iterable of factor method names declared on the project's
            ``ArrayManagerFactor`` class.
        file_manager: provides ``factor_am_path``, the path of the module
            that declares ``ArrayManagerFactor``.

    Returns:
        The same dict with the new factor columns added, or ``None`` when a
        factor cannot be called with only ``array=True`` (the user is shown a
        message box instead of raising).

    Raises:
        ValueError: if a factor's return value is not a numpy ndarray.
    """
    # Load the factor class from the user-supplied module path.
    factor_am_module = load_module_from_path(file_manager.factor_am_path)

    for symbol, data in data_dc.items():
        am = factor_am_module.ArrayManagerFactor(len(data), symbol='', interval=Interval.MINUTE)
        # Feed the OHLCV series straight into the array manager's buffers.
        am.open_array = data['open'].to_numpy()
        am.high_array = data['high'].to_numpy()
        am.low_array = data['low'].to_numpy()
        am.close_array = data['close'].to_numpy()
        am.volume_array = data['volume'].to_numpy()
        # Evaluate every requested factor and attach it as a new column.
        for f in factor_ls:
            method_to_call = getattr(am, f)
            # Factors must default every parameter except `array`, so that
            # values can be generated automatically with array=True.
            try:
                result = method_to_call(array=True)
            except Exception:
                # Best-effort: tell the user which factor is misconfigured
                # and abort instead of crashing.
                info_messagebox(f"因子{f}存在除array外未指定默认值的参数，请检查。")
                return
            if not isinstance(result, np.ndarray):
                raise ValueError(f"因子{f}返回值不满足一维ndarray要求。")
            # Attach the factor values as a new column on the data frame.
            data[f] = result
        data_dc[symbol] = data

    return data_dc


def get_mem_f_value(value, enum_c):
    """Return the member of *enum_c* whose ``value`` equals *value*.

    Args:
        value: the raw value to look up.
        enum_c: the Enum class to search.

    Raises:
        ValueError: if no member of *enum_c* carries that value.
    """
    for member in enum_c:
        if member.value == value:
            return member
    raise ValueError("enum不存在的value。")


def load_data_p2_data(path, file_manager):
    """Read the stage-two data dict from *path* via the file manager.

    Returns the non-empty dict on success; logs the failure and returns
    ``False`` when nothing could be read.
    """
    loaded = file_manager.read_dc_csv(path)
    if not loaded:
        file_manager.log_engine.emit(f"读取数据失败，路径：{path}", LogName.Running)
        return False
    return loaded


