"""
@author  : MG
@Time    : 2020/9/27 8:10
@File    : reversion_rights_factor.py
@contact : mmmaaaggg@163.com
@desc    : 用于计算给定期货品种的前复权因子、对应的合约及日期，导入数据库
2015年12月31日以前的数据不处理
由于 CZCE 的数据存在合约代码复用的问题，导致复权数据出现严重偏差，因此，对于过早的历史数据，不做复权计算。

另外，每个季度需要全量更新一下 wind_future_info 表

删除历史数据脚本：
delete from wind_future_daily where wind_code in (
"TA211.CZC",
"TA212.CZC",
"TA301.CZC",
"TA302.CZC",
"TA303.CZC",
"TA304.CZC",
"TA305.CZC",
"TA306.CZC",
"TA307.CZC",
"TA308.CZC",
"TA309.CZC",
"TA310.CZC",
"TA311.CZC",
"TA312.CZC",
"TA401.CZC",
"TA402.CZC",
"TA403.CZC",
"TA404.CZC",
"TA405.CZC",
"TA406.CZC",
"TA407.CZC",
"TA408.CZC",
"TA409.CZC",
"TA410.CZC",
"TA411.CZC",
"TA412.CZC",
"TA501.CZC",
"TA502.CZC",
"TA503.CZC",
"TA504.CZC",
"TA505.CZC",
"TA506.CZC",
"TA507.CZC",
"TA508.CZC",
"TA509.CZC",
"TA510.CZC",
"TA511.CZC",
"TA512.CZC"
) and trade_date < "2015-12-31"

由于郑商所历史合约代码重复，因此需要将10年前的历史数据进行清除，否则将导致复权出现问题。
"""
import datetime
import logging
import os
from collections import defaultdict
from enum import Enum
from multiprocessing import Pool
from typing import Callable, Optional, Dict, Tuple

import numpy as np
import pandas as pd
from ibats_utils.db import bunch_insert_on_duplicate_update, with_db_session
from ibats_utils.mess import str_2_float, date_2_str
from sqlalchemy.dialects.mysql import DOUBLE
from sqlalchemy.types import String, Date

from tasks import app
from tasks.backend import engine_md
from tasks.config import config
from tasks.wind.future_reorg.reorg_md_2_db import is_earlier_instruments, is_later_instruments, \
    get_all_instrument_type, get_instrument_last_trade_date_dic

logger = logging.getLogger()
# Instrument types excluded from factor generation: renamed or delisted products.
IGNORE_INSTRUMENT_TYPE_SET = {
    'TC',  # thermal coal, renamed to ZC
    'ER',  # early indica rice, delisted
    'RO',  # rapeseed oil, delisted
    'WS',  # strong-gluten wheat, listed 2003-03-28, delisted
    'WT',  # hard wheat, delisted
}


class ReversionRightsMethod(Enum):
    """
    Adjustment-method enum.

    Each member's value is the neutral/default adj_factor for that method:
    1 for the multiplicative *division* method, 0 for the additive *diff* method.
    """
    division = 1
    diff = 0


def calc_adj_factor(close_df, trade_date, instrument_id_curr, instrument_id_last,
                    method: ReversionRightsMethod = ReversionRightsMethod.division):
    """
    Compute the price adjustment factor between two contracts at a roll date.

    The factor is a back-adjustment factor, applied as:
    * division: adjusted price = price of ``instrument_id_last`` * adj_factor
    * diff:     adjusted price = price of ``instrument_id_last`` + adj_factor

    :param close_df: close prices, columns are contract codes, index is trade dates
    :param trade_date: date on which both contracts are quoted
    :param instrument_id_curr: the contract rolled into
    :param instrument_id_last: the contract rolled out of
    :param method: division -> ratio of closes, diff -> difference of closes
    :return: the adjustment factor
    :raises ValueError: if *method* is not a supported ReversionRightsMethod
    """
    last_close = close_df[instrument_id_last][trade_date]
    curr_close = close_df[instrument_id_curr][trade_date]
    if method == ReversionRightsMethod.diff:
        return curr_close - last_close
    if method == ReversionRightsMethod.division:
        return curr_close / last_close
    raise ValueError(f"method {method} 不被支持")


def generate_reversion_rights_factors(instrument_type, switch_by_key='position',
                                      method: ReversionRightsMethod = ReversionRightsMethod.division,
                                      from_trade_date='2015-12-31',
                                      ) -> Tuple[Optional[pd.DataFrame], Optional[datetime.date]]:
    """
    Generate back-adjustment factors for all historical contracts of one instrument type.

    :param instrument_type: instrument type code, e.g. RB, I, HC
    :param switch_by_key: ranking column: position (open interest), volume,
        st_stock (registered warehouse receipts)
    :param method: division -> multiplicative factors, diff -> additive factors
    :param from_trade_date: only use data strictly after this date (None -> all
        history); the default skips pre-2016 data because CZCE reuses contract
        codes, which corrupts adjustment factors (see module docstring)
    :return: (adj_factor DataFrame, latest trade date), or (None, None) when no data
    """
    instrument_last_trade_date_dic = get_instrument_last_trade_date_dic()
    instrument_type = instrument_type.upper()
    # Load daily quotes of every historical contract of this instrument type.
    # The two original query branches differed only by the date filter, so the
    # SQL is built once with an optional condition appended.
    sql_str = r"""select wind_code, trade_date, open, close, """ + switch_by_key + """ 
      from wind_future_daily where wind_code regexp %s"""
    params = [r'^%s[0-9]+\.[A-Z]{3,4}' % (instrument_type,)]
    if from_trade_date:
        sql_str += " and trade_date > %s"
        params.append(from_trade_date)
    data_df = pd.read_sql(sql_str, engine_md, params=params)

    if data_df.shape[0] == 0:
        logger.warning(f"{instrument_type} from_trade_date={from_trade_date} 没有数据")
        return None, None
    close_df = data_df.pivot(index="trade_date", columns="wind_code", values="close")
    switch_by_df = data_df.pivot(index="trade_date", columns="wind_code", values=switch_by_key).sort_index()
    if switch_by_df.shape[0] == 0:
        # Defensive: cannot normally happen when data_df is non-empty.  The log
        # message announced a skip, so now the function actually skips.
        logger.warning("查询 %s 包含 %d 条记录 %d 个合约，跳过", instrument_type, *switch_by_df.shape)
        return None, None
    logger.info("查询 %s 包含 %d 条记录 %d 个合约", instrument_type, *switch_by_df.shape)

    return generate_reversion_rights_factors_by_df(
        instrument_type, switch_by_key, close_df, switch_by_df,
        instrument_last_trade_date_dic,
        method)


def generate_reversion_rights_factors_by_df(
        instrument_type, switch_by_key, close_df, switch_by_df,
        instrument_last_trade_date_dic,
        method: ReversionRightsMethod = ReversionRightsMethod.division
) -> Tuple[Optional[pd.DataFrame], Optional[datetime.date]]:
    """
    Generate back-adjustment factors for one instrument type from pivoted daily data.

    Walks trade dates in order, tracking the main and secondary contract (chosen
    by the largest *switch_by_key* value, with contract codes only allowed to
    roll forward).  Whenever the main/secondary contract switches, an adjustment
    factor between the old and new contract is recorded on the previous available
    trade date; the factors are then accumulated (cumprod for division, cumsum
    for diff) from the most recent date backwards.

    :param instrument_type: instrument type code, e.g. RB, I, HC
    :param switch_by_key: name of the ranking column (position / volume / st_stock)
    :param close_df: close prices, index=trade_date, columns=wind_code
    :param switch_by_df: ranking values, index=trade_date, columns=wind_code
    :param instrument_last_trade_date_dic: wind_code -> last trade date of the contract
    :param method: division -> multiplicative factors, diff -> additive factors
    :return: (adj_factor DataFrame, latest trade date), or (None, None) when
        required factor columns could not be produced
    """
    # current main / secondary contract codes
    instrument_id_main, instrument_id_secondary = None, None
    # contract codes ordered from the oldest to the most recent expiry
    instrument_id_list_sorted = []
    instrument_id_list_missing = []
    for wind_code in switch_by_df.columns:
        if wind_code not in instrument_last_trade_date_dic:
            instrument_id_list_missing.append(wind_code)
            continue
        instrument_id_list_sorted.append(wind_code)

    if instrument_id_list_missing:
        logger.warning(f"{len(instrument_id_list_missing)} 合约没有找到相应的 last_trade_date 数据，"
                       f"包括：{instrument_id_list_missing}")

    # order contracts by their last trade date
    instrument_id_list_sorted.sort(key=lambda x: instrument_last_trade_date_dic[x])
    switch_by_df = switch_by_df[instrument_id_list_sorted]
    date_adj_factor_dic = defaultdict(dict)
    # Walk trade dates, re-deciding the main / secondary contract day by day.
    # Rules:
    #   * the main contract is the one with the largest switch_by_key value
    #     that day, and the contract code may only roll forward, never back;
    #   * the secondary contract must expire later than the main contract.
    trade_date_list = list(switch_by_df.index)
    trade_date_latest = np.max(trade_date_list)
    trade_date_available_list = []
    trade_date, trade_date_last = None, None  # current and previous trade dates
    continue_warning_message_dic: Dict[str, dict] = {}
    for n, trade_date in enumerate(trade_date_list):
        # keep only contracts actually quoted on this date
        switch_by_s = switch_by_df.loc[trade_date].dropna()
        if switch_by_s.shape[0] == 0:
            # merge consecutive "no data" warnings into a single log entry
            msg_dic = continue_warning_message_dic.setdefault(
                "switch_by_key_missing",
                {
                    'count': 0,
                    'from_idx': n,
                    'to_idx': n,
                    'from_trade_date': trade_date,
                    'to_trade_date': trade_date,
                }
            )
            msg_dic['count'] += 1
            msg_dic['to_idx'] = n
            msg_dic['to_trade_date'] = trade_date
            continue
        else:
            msg_dic: Optional[dict] = continue_warning_message_dic.pop('switch_by_key_missing', None)
            if msg_dic:
                logger.warning(
                    "{from_idx} ~ {to_idx}[{count}] [{from_trade_date} ~ {to_trade_date}] "
                    "{instrument_type} 没有 {switch_by_key} 数据".format(
                        instrument_type=instrument_type, switch_by_key=switch_by_key, **msg_dic)
                )

        instrument_id_main_last, instrument_id_secondary_last = instrument_id_main, instrument_id_secondary
        # scan today's contracts for a better main / secondary contract
        for instrument_id in switch_by_s.index:
            if instrument_id_main is not None \
                    and is_earlier_instruments(instrument_id, instrument_id_main, instrument_last_trade_date_dic):
                # a contract expiring before the current main contract can never replace it
                continue
            # main-contract selection
            if instrument_id_main is None:
                instrument_id_main = instrument_id
            elif instrument_id_main not in switch_by_s or switch_by_s[instrument_id_main] < switch_by_s[instrument_id]:
                instrument_id_main = instrument_id
                if instrument_id_secondary is not None \
                        and (instrument_id_main == instrument_id_secondary
                             or is_later_instruments(instrument_id_main, instrument_id_secondary,
                                                     instrument_last_trade_date_dic)):
                    # the secondary contract must expire after the main one;
                    # otherwise reset it and search for a suitable one again
                    instrument_id_secondary = None
            else:
                pass

            # secondary-contract selection
            if instrument_id_secondary is None:
                if instrument_id_main != instrument_id:
                    instrument_id_secondary = instrument_id
            elif is_earlier_instruments(instrument_id_secondary, instrument_id, instrument_last_trade_date_dic) \
                    and instrument_id_secondary in switch_by_s \
                    and switch_by_s[instrument_id_secondary] < switch_by_s[instrument_id]:
                instrument_id_secondary = instrument_id

        # the main contract should always be determined at this point
        if instrument_id_main is None:
            logger.warning("%d) %s 主力合约缺少主力合约", n, trade_date)

        # flush the merged "no secondary contract" warning once the main contract changes
        if 'missing_instrument_id_secondary' in continue_warning_message_dic:
            msg_dic: Optional[dict] = continue_warning_message_dic['missing_instrument_id_secondary']
            if msg_dic['instrument_id_main'] != instrument_id_main:
                logger.warning(
                    "{from_idx} ~ {to_idx}[{count}] [{from_trade_date} ~ {to_trade_date}] "
                    "当日主力合约 {instrument_id_main}, 没有次主力合约".format(**msg_dic)
                )
                del continue_warning_message_dic['missing_instrument_id_secondary']

        if instrument_id_secondary is None:
            # merge consecutive "no secondary contract" warnings to reduce log noise
            msg_dic = continue_warning_message_dic.setdefault(
                'missing_instrument_id_secondary',
                {
                    'count': 0,
                    'from_idx': n,
                    'to_idx': n,
                    'from_trade_date': trade_date,
                    'to_trade_date': trade_date,
                    'instrument_id_main': instrument_id_main,
                }
            )
            msg_dic['count'] += 1
            msg_dic['to_idx'] = n
            msg_dic['to_trade_date'] = trade_date
        else:
            msg_dic: Optional[dict] = continue_warning_message_dic.pop('missing_instrument_id_secondary', None)
            if msg_dic:
                logger.warning(
                    "{from_idx} ~ {to_idx}[{count}] [{from_trade_date} ~ {to_trade_date}] "
                    "当日主力合约 {instrument_id_main}, 没有次主力合约".format(**msg_dic)
                )

        # main contract rolled: record the factor on the previous available date
        if instrument_id_main_last is not None \
                and instrument_id_main_last != instrument_id_main:
            trade_date_last = trade_date_available_list[-1]
            adj_chg = calc_adj_factor(
                close_df, trade_date_last,
                instrument_id_curr=instrument_id_main,
                instrument_id_last=instrument_id_main_last,
                method=method
            )
            if not (np.isnan(adj_chg) or np.isinf(adj_chg)):
                date_adj_factor_dic[trade_date_last]['adj_factor_main'] = adj_chg
                date_adj_factor_dic[trade_date_last]['instrument_id_main'] = instrument_id_main_last

        # secondary contract rolled: record its factor likewise
        if instrument_id_secondary_last is not None \
                and instrument_id_secondary is not None \
                and instrument_id_secondary_last != instrument_id_secondary:
            trade_date_last = trade_date_available_list[-1]
            adj_chg = calc_adj_factor(
                close_df, trade_date_last,
                instrument_id_curr=instrument_id_secondary,
                instrument_id_last=instrument_id_secondary_last,
                method=method
            )
            if not (np.isnan(adj_chg) or np.isinf(adj_chg)):
                date_adj_factor_dic[trade_date_last]['adj_factor_secondary'] = adj_chg
                date_adj_factor_dic[trade_date_last]['instrument_id_secondary'] = instrument_id_secondary_last

        # remember this date as the most recent one with usable data
        trade_date_available_list.append(trade_date)

    # flush warnings still pending after the last trade date
    msg_dic: Optional[dict] = continue_warning_message_dic.pop('switch_by_key_missing', None)
    if msg_dic:
        logger.warning(
            "{from_idx} ~ {to_idx}[{count}] [{from_trade_date} ~ {to_trade_date}] "
            "{instrument_type} 没有 {switch_by_key} 数据".format(
                instrument_type=instrument_type, switch_by_key=switch_by_key, **msg_dic)
        )

    msg_dic: Optional[dict] = continue_warning_message_dic.pop('missing_instrument_id_secondary', None)
    if msg_dic:
        # CONSISTENCY FIX: this message previously read "{from_idx} ~ {to_idx})"
        # (stray parenthesis, no count); unified with the other merged warnings.
        logger.warning(
            "{from_idx} ~ {to_idx}[{count}] [{from_trade_date} ~ {to_trade_date}] "
            "当日主力合约 {instrument_id_main}, 没有次主力合约".format(**msg_dic)
        )

    if trade_date is None or instrument_id_main is None or instrument_id_secondary is None:
        logger.warning("当前品种 %s 最后一个交易日期 %s 主力合约 %s 次主力合约 %s，历史数据错误",
                       instrument_type, trade_date, instrument_id_main, instrument_id_secondary)
    else:
        # the latest trade date gets the method's neutral factor (1 for division, 0 for diff)
        date_adj_factor_dic[trade_date] = {
            'adj_factor_main': method.value,
            'instrument_id_main': instrument_id_main,
            'adj_factor_secondary': method.value,
            'instrument_id_secondary': instrument_id_secondary,
        }

    # build the factor frame, most recent date first, then accumulate backwards
    adj_factor_df = pd.DataFrame(date_adj_factor_dic).T.sort_index(ascending=False)
    available_titles = [
        "instrument_id_main",
        "adj_factor_main",
        "instrument_id_secondary",
        "adj_factor_secondary",
    ]
    missing_columns = set(available_titles) - set(adj_factor_df.columns)
    if len(missing_columns) > 0:
        logger.error("%s 缺少 %s 数据信息，无法生成复权因子", instrument_type, missing_columns)
        return None, None

    adj_factor_df = adj_factor_df[available_titles]
    # BUG FIX: gaps must be filled with the method's neutral element before
    # accumulating.  The neutral element is 1 for division (cumprod) but 0 for
    # diff (cumsum) -- exactly the enum member values.  The previous code filled
    # with 1 unconditionally, shifting every accumulated diff factor by +1 per
    # gap row.
    neutral_value = method.value
    if method == ReversionRightsMethod.division:
        adj_factor_df['adj_factor_main'] = adj_factor_df['adj_factor_main'].fillna(neutral_value).cumprod()
        adj_factor_df['adj_factor_secondary'] = adj_factor_df['adj_factor_secondary'].fillna(neutral_value).cumprod()
    elif method == ReversionRightsMethod.diff:
        adj_factor_df['adj_factor_main'] = adj_factor_df['adj_factor_main'].fillna(neutral_value).cumsum()
        adj_factor_df['adj_factor_secondary'] = adj_factor_df['adj_factor_secondary'].fillna(neutral_value).cumsum()
    else:
        raise ValueError(f"method={method} 不被支持")

    adj_factor_df = adj_factor_df.ffill().sort_index().reset_index().rename(
        columns={'index': 'trade_date'})
    adj_factor_df["instrument_type"] = instrument_type
    return adj_factor_df, trade_date_latest


def update_df_2_db(instrument_type, table_name, data_df, method: ReversionRightsMethod, dtype=None):
    """
    Save the adjustment-factor DataFrame into the given database table.

    Existing rows for the same (instrument_type, method) are deleted first, then
    the fresh rows are bulk-inserted (insert-on-duplicate-update).

    :param instrument_type: instrument type code the rows belong to
    :param table_name: target table name (created on first insert if missing)
    :param data_df: adjustment-factor rows; mutated in place (the two factor
        columns are coerced to plain float)
    :param method: adjustment method; its ``name`` keys the stored rows
    :param dtype: optional sqlalchemy column-type mapping used at table creation
    """
    # Coerce to plain float to avoid the MySQL driver error
    # "AttributeError: 'numpy.float64' object has no attribute 'translate'".
    data_df["adj_factor_main"] = data_df["adj_factor_main"].apply(str_2_float)
    data_df["adj_factor_secondary"] = data_df["adj_factor_secondary"].apply(str_2_float)
    # Remove historical rows for this instrument_type / method before reloading.
    with with_db_session(engine_md) as session:
        sql_str = "SELECT table_name FROM information_schema.TABLES " \
                  "WHERE table_name = :table_name and TABLE_SCHEMA=(select database())"
        # only delete when the target table already exists
        is_existed = session.execute(sql_str, params={"table_name": table_name}).fetchone()
        if is_existed is not None:
            session.execute("delete from %s where instrument_type = :instrument_type and method= :method" % table_name,
                            params={"instrument_type": instrument_type, "method": method.name})
            logger.debug("删除 %s 中的 %s 历史数据，重新载入新的复权数据", table_name, instrument_type)

    # insert the fresh rows (creates the table as MyISAM if it does not exist yet)
    bunch_insert_on_duplicate_update(
        data_df, table_name, engine_md,
        dtype=dtype, myisam_if_create_table=True,
        primary_keys=['trade_date', 'instrument_type', 'method'], schema=config.DB_SCHEMA_MD)


def save_adj_factor_all(
        instrument_types: list, db_table_name='wind_future_adj_factor', to_csv=True, multi_process=0,
        generate_reversion_rights_factors_func=generate_reversion_rights_factors,
        ignore_instrument_type_set=None,
):
    """
    Generate and persist adjustment factors for every instrument type and method.

    :param instrument_types: instrument type codes to process
    :param db_table_name: target database table; None -> do not save to DB
    :param to_csv: also dump each result to a csv file under ./output
    :param multi_process: number of worker processes; <= 1 runs in-process
    :param generate_reversion_rights_factors_func: factory producing the factor DataFrame
    :param ignore_instrument_type_set: instrument types to skip
        (defaults to the delisted/renamed products in IGNORE_INSTRUMENT_TYPE_SET)
    :return: None
    """
    if ignore_instrument_type_set is None:
        ignore_instrument_type_set = IGNORE_INSTRUMENT_TYPE_SET.copy()

    if to_csv:
        to_csv_dir_path = 'output'
        # make sure the output folder exists
        os.makedirs(to_csv_dir_path, exist_ok=True)
    else:
        to_csv_dir_path = None

    pool = Pool(multi_process) if multi_process > 1 else None

    for method in ReversionRightsMethod:
        for n, instrument_type in enumerate(instrument_types):
            if instrument_type.upper() in ignore_instrument_type_set:
                logger.warning(f"忽略 {instrument_type}")
                continue
            if pool is None:
                save_adj_factor(
                    instrument_type, method, db_table_name, to_csv_dir_path,
                    generate_reversion_rights_factors_func)
            else:
                pool.apply_async(
                    save_adj_factor,
                    args=(instrument_type, method, db_table_name, to_csv_dir_path,
                          generate_reversion_rights_factors_func))

    if pool is not None:
        # BUG FIX: Pool.join() raises ValueError unless close() (or terminate())
        # is called first; close() also lets the queued async tasks drain.
        pool.close()
        pool.join()


def save_adj_factor(
        instrument_type: str, method: ReversionRightsMethod,
        db_table_name='wind_future_adj_factor', to_csv_dir_path=None,
        generate_reversion_rights_factors_func: Callable = generate_reversion_rights_factors
) -> Tuple[Optional[pd.DataFrame], Optional[datetime.date]]:
    """
    Generate the adjustment factors for one instrument type and persist them.

    :param instrument_type: instrument type code, e.g. RB, I, HC
    :param method: adjustment method (division / diff)
    :param db_table_name: target database table; None -> do not save to DB
    :param to_csv_dir_path: csv output directory; None -> do not write csv
    :param generate_reversion_rights_factors_func: factory producing the factor DataFrame
    :return: (adj_factor DataFrame, latest trade date), or (None, None) when no data
    """
    logger.info("生成 %s 复权因子[%s]", instrument_type, method.name)
    adj_factor_df, trade_date_latest = generate_reversion_rights_factors_func(instrument_type, method=method)
    if adj_factor_df is None:
        return None, None

    if to_csv_dir_path is not None:
        # one sub-folder per latest trade date, e.g. output/2020-09-27/adj_factor_RB_division.csv
        csv_file_name = f'adj_factor_{instrument_type}_{method.name}.csv'
        folder_path = os.path.join(to_csv_dir_path, date_2_str(trade_date_latest))
        csv_file_path = os.path.join(folder_path, csv_file_name)
        os.makedirs(folder_path, exist_ok=True)
        adj_factor_df.to_csv(csv_file_path, index=False)

    if db_table_name is not None:
        dtype = {
            'trade_date': Date,
            'instrument_id_main': String(20),
            'adj_factor_main': DOUBLE,
            'instrument_id_secondary': String(20),
            'adj_factor_secondary': DOUBLE,
            'instrument_type': String(20),
            'method': String(20),
        }
        adj_factor_df['method'] = method.name
        update_df_2_db(instrument_type, db_table_name, adj_factor_df, method=method, dtype=dtype)

    logger.info("生成 %s 复权因子 %d 条记录[%s]",
                instrument_type, adj_factor_df.shape[0], method.name)
    # BUG FIX: the function is annotated to return a tuple but previously fell
    # through and returned None, so callers unpacking the result would fail.
    return adj_factor_df, trade_date_latest


def _test_generate_reversion_rights_factors():
    """Manual smoke test: print the back-adjustment factors for methanol (MA)."""
    factor_df, _latest_date = generate_reversion_rights_factors(instrument_type='MA')
    print(factor_df)


@app.task
def task_save_adj_factor(chain_param=None, instrument_types=None):
    """
    Celery task: generate and persist adjustment factors single-process.

    :param chain_param: unused; kept for task-chaining compatibility
    :param instrument_types: instrument type codes; None -> every known type
    """
    if instrument_types is None:
        selected_types = get_all_instrument_type()
    else:
        selected_types = [code.upper() for code in instrument_types]

    save_adj_factor_all(
        instrument_types=selected_types, multi_process=0,
        generate_reversion_rights_factors_func=generate_reversion_rights_factors
    )


def backup_to_db(table_name, new_engine, dtype):
    """Copy *table_name* wholesale from the md database into *new_engine* (full replace)."""
    source_df = pd.read_sql(f"select * from {table_name}", engine_md)
    source_df.to_sql(table_name, new_engine, dtype=dtype, if_exists='replace', index=False)
    logger.info("%s 备份到 vnpy 数据库完成，%d 条数据", table_name, source_df.shape[0])


if __name__ == "__main__":
    # _test_generate_reversion_rights_factors()
    # generate and persist adjustment factors for every instrument type
    task_save_adj_factor(instrument_types=None)  # ['ME']
    # NOTE(review): import deferred until after the factors are generated --
    # presumably to avoid a circular import; confirm before moving it to the top
    from tasks.wind.to_vnpy import reversion_rights_factors_2_vnpy

    reversion_rights_factors_2_vnpy()
