"""
    因子处理通用方法
"""
import pandas as pd
import numpy as np
import datetime
from sklearn.linear_model import HuberRegressor
from data_resource.data_bases import engine
from utilities.utilities_func import sliding_window


# Factor standardization
def factor_standardize(signal, method='zscore', extreme=True, if_standardize=True, extreme_thred=5) -> pd.Series:
    """Winsorize and standardize a factor series.

    Args:
        signal (pd.Series): raw factor values.
        method (str): standardization scheme, one of:
            - 'zscore': subtract mean, divide by std (eps-guarded)
            - 'minmax': scale into [0, 1] (eps-guarded)
            - 'rank': percentile rank, centered at 0.5
        extreme (bool): whether to winsorize with a median +/- n*MAD clip first.
        if_standardize (bool): whether to apply the standardization step at all.
        extreme_thred (int|float): number of MADs used for the winsorization clip.

    Returns:
        pd.Series: processed factor values with the original index preserved.

    Raises:
        ValueError: if `signal` is not a Series or `method` is unknown.
    """
    if not isinstance(signal, pd.Series):
        raise ValueError("输入信号必须是pd.Series格式")

    # Nothing to do when every observation is missing.
    if signal.isna().all():
        return signal

    # Winsorize: clip to median +/- extreme_thred * MAD.
    if extreme:
        center = signal.median()
        mad_value = (signal - center).abs().median()
        lower_bound = center - extreme_thred * mad_value
        upper_bound = center + extreme_thred * mad_value
        signal = signal.clip(lower=lower_bound, upper=upper_bound)

    # Caller only wanted winsorization.
    if not if_standardize:
        return signal

    if method == 'zscore':
        # Small epsilon guards against a zero standard deviation.
        return (signal - signal.mean()) / (signal.std() + 1e-8)
    if method == 'minmax':
        lo, hi = signal.min(), signal.max()
        return (signal - lo) / (hi - lo + 1e-8)
    if method == 'rank':
        return signal.rank(pct=True) - 0.5
    raise ValueError(f"不支持的标准化方法: {method}")


# Factor neutralization
def factor_neutralize(df: pd.DataFrame, factor_name='signal', method='Huber', netural='industry', epsilon=1.35,
                      max_iter=1000) -> pd.DataFrame:
    """Neutralize a factor against industry and/or (log) market-cap exposure.

    Cross-sectional neutralization: regress the factor on the chosen exposures
    and keep the residual as the purified factor, removing industry/size tilt
    from backtests. Huber regression is used to damp the influence of outliers.

    Args:
        df: factor frame. Required columns depend on `netural`:
            - 'industry':        ['code', <factor_name>, 'industry_code']
            - 'lg_mktCap':       ['code', <factor_name>, 'circ_mv']
            - 'industry_mktCap': ['code', <factor_name>, 'industry_code', 'circ_mv']
            NOTE: mutated in place — `factor_name` is renamed to 'signal' and a
            'signal_neutral' column is appended.
        factor_name: name of the factor column in `df`.
        method: regression method; only 'Huber' is supported.
        netural: neutralization scheme ('industry', 'lg_mktCap', 'industry_mktCap').
        epsilon: Huber loss threshold passed to HuberRegressor.
        max_iter: iteration cap passed to HuberRegressor.

    Returns:
        pd.DataFrame: `df` with the regression residual in 'signal_neutral'.

    Raises:
        ValueError: on an unsupported `netural`/`method`, or missing columns.
    """
    df.rename(columns={factor_name: 'signal'}, inplace=True)

    if netural not in ['industry', 'lg_mktCap', 'industry_mktCap']:
        raise ValueError(f"不支持的因子中性化方法: {netural}")
    if method not in ['Huber']:
        raise ValueError(f"不支持的因子中性化回归方法: {method}")

    # Validate required columns for every scheme (previously only 'industry'
    # was checked; the other branches computed the set but never used it).
    if netural == 'industry':
        _required_columns = {'code', 'signal', 'industry_code'}
    elif netural == 'lg_mktCap':
        _required_columns = {'code', 'signal', 'circ_mv'}
    else:
        _required_columns = {'code', 'signal', 'industry_code', 'circ_mv'}
    if not _required_columns.issubset(df.columns):
        missing_cols = _required_columns - set(df.columns)
        raise ValueError(f"缺失必要列: {missing_cols}")

    # Build the design matrix for the chosen exposure set.
    if netural == 'industry':
        # Industry dummies (drop_first avoids the dummy trap).
        df1 = pd.get_dummies(df, columns=['industry_code'], prefix='dump_', drop_first=True)
        x = df1.filter(like='dump_').values
    elif netural == 'lg_mktCap':
        df1 = df.copy()
        df1['ln_cap'] = np.log(df1['circ_mv'])
        # Double brackets keep x 2-D — sklearn requires shape (n_samples, n_features).
        x = df1[['ln_cap']].values
    else:
        df1 = pd.get_dummies(df, columns=['industry_code'], prefix='dump_', drop_first=True)
        df1['ln_cap'] = np.log(df1['circ_mv'])
        x = df1[[col for col in df1.columns if col.startswith('dump_')] + ['ln_cap']].values
    y = df1['signal'].values

    # Robust regression; the residual is the neutralized factor.
    # (Bug fix: the 'lg_mktCap' branch previously called predict() without fit().)
    huber = HuberRegressor(epsilon=epsilon, max_iter=max_iter)
    huber.fit(x, y)
    _f = y - huber.predict(x)

    df['signal_neutral'] = _f
    return df


# Align factor dates to the trading calendar
def align_trade_date(signal: pd.DataFrame, engine, signal_columns=None) -> pd.DataFrame:
    """
        Align factor announcement dates to the exchange trading calendar,
        forward-fill factor values across dates without a fresh observation,
        and left-join daily open/close prices.

        Args:
            signal: factor DataFrame, columns=['code', 'f_ann_date', 'end_date'] + signal_columns.
                NOTE: mutated in place — 'f_ann_date' is converted to datetime.date.
            engine: database connection used to read the trading calendar and
                daily prices. NOTE(review): this parameter shadows the
                module-level `engine` imported at the top of the file.
            signal_columns: names of the factor-value columns to carry through;
                defaults to ['signal'].

        Returns:
            pd.DataFrame: one row per (code, open trading day) with forward-filled
            factor values plus 'open' and 'close' prices.

        Assumes the calendar table has columns 'cal_date', 'is_open',
        'pretrade_date' — TODO confirm against quant_research.basic_trading_date.
    """
    # Default to a single factor column named 'signal'.
    if signal_columns is None:
        signal_columns = ['signal']

    # Full trading calendar (open and closed days).
    _sql1 = """
        select * from quant_research.basic_trading_date
    """
    tradings = pd.read_sql(_sql1, engine)

    # Normalize both join keys to plain datetime.date so the merge keys match.
    signal['f_ann_date'] = pd.to_datetime(signal['f_ann_date']).dt.date
    tradings['cal_date'] = pd.to_datetime(tradings['cal_date']).dt.date

    def _align_tradings(group, _tradings):
        # Per-code alignment: outer-join announcements onto the calendar,
        # then forward-fill so each calendar day carries the latest factor value.
        _start = group['f_ann_date'].min()
        _tradings = _tradings[(_tradings['cal_date'] >= _start) & (
                _tradings['cal_date'] <= datetime.datetime.today().date())].copy()  # restrict calendar to [first announcement, today]

        _merge = pd.merge(
            group, _tradings,
            left_on='f_ann_date', right_on='cal_date', how='outer'
        )
        _columns = ['code', 'cal_date', 'end_date', 'is_open', 'pretrade_date'] + signal_columns
        m = _merge[_columns].ffill()  # forward-fill missing values from the last observation
        return m

    results_columns = ['code', 'f_ann_date', 'end_date'] + signal_columns
    results = signal.groupby('code', group_keys=False)[results_columns].apply(
        lambda group: _align_tradings(group, tradings))
    results.reset_index(inplace=True, drop=True)

    # Keep open trading days only; rename the calendar date to 'trading'.
    factor = results[results['is_open'] == 1].copy()
    factor.rename(columns={'cal_date': 'trading'}, inplace=True)

    # Left-join daily open/close prices over the factor's date range.
    start_date = factor['trading'].min().strftime("%Y-%m-%d")
    end_date = factor['trading'].max().strftime("%Y-%m-%d")
    _sql = f"""
        select ticker as code, trade_date as trading, open, close
        from quant_research.market_daily_ts
        where trade_date between '{start_date}' and '{end_date}'
    """

    price = pd.read_sql(_sql, con=engine)
    signals = pd.merge(factor, price, how='left', on=['code', 'trading'])
    return signals
