# _*_ coding: utf-8 _*_
# @Time : 2024/9/18 14:47
# @Author : Magic
# @File : Cycle.py
from scipy.signal import detrend
import numpy as np
import pandas as pd
import statsmodels.api as sm
import copy
from scipy.fftpack import rfft, irfft, rfftfreq
from metainfo import conn
from api.calculate_drawdown import fetch_index_price


def detrend(original_y):
    """Split a price series into an exponential trend and a residual cycle.

    A straight line is fitted by OLS to log10(y) against time, so the trend
    is exponential in the original scale: trend = 10 ** (a + b*t).

    NOTE(review): this definition shadows `scipy.signal.detrend` imported at
    the top of the file; the scipy version is unreachable below this point.

    Parameters
    ----------
    original_y : pd.DataFrame
        Single-column positive series (log10 is taken).

    Returns
    -------
    pd.DataFrame
        Columns ['original_y', 'trend', 'cycle'] with
        cycle = original_y - trend.
    """
    log_y = np.log10(original_y)
    time_axis = sm.add_constant(np.array(range(len(log_y))))
    fit = sm.OLS(log_y.values, time_axis).fit()
    trend = np.power(10, pd.DataFrame(fit.fittedvalues, index=log_y.index, columns=log_y.columns))
    residual = original_y - trend

    out = pd.concat([original_y, trend, residual], axis=1)
    out.columns = ['original_y', 'trend', 'cycle']
    return out


def new_pass_filter(signal_df, dela_time=1, upper_duration=np.inf, lower_duration=0):
    """Band-pass filter the first column of `signal_df` in the frequency domain.

    Frequency components whose period falls outside
    [lower_duration, upper_duration] (in units of `dela_time` samples) are
    zeroed before the inverse FFT.

    Parameters
    ----------
    signal_df : pd.DataFrame
        Time series; only the first column is filtered.
    dela_time : float
        Sample spacing used to build the frequency axis.
    upper_duration : float
        Longest period to keep; np.inf keeps arbitrarily slow components
        (including the DC/trend term).
    lower_duration : float
        Shortest period to keep; 0 keeps arbitrarily fast components.

    Returns
    -------
    pd.DataFrame
        Filtered series with the same index and columns as `signal_df`.
    """
    signal = signal_df.iloc[:, 0].values

    # scipy.fftpack's packed real FFT; W carries each packed bin's frequency.
    W = rfftfreq(signal.size, d=dela_time)
    f_signal = rfft(signal)

    cut_f_signal = f_signal.copy()

    # Bug fix: the original `1 / lower_duration` raised ZeroDivisionError for
    # the default lower_duration=0.  A zero lower bound means "no high cut";
    # symmetrically, an infinite upper bound means "no low cut".
    high_cut = np.inf if lower_duration == 0 else 1 / lower_duration
    low_cut = 0 if upper_duration == np.inf else 1 / upper_duration
    cut_f_signal[W > high_cut] = 0
    cut_f_signal[W < low_cut] = 0

    cut_signal = irfft(cut_f_signal)

    return pd.DataFrame(cut_signal, index=signal_df.index, columns=signal_df.columns)


def ts2fs(data, add_len_multiple=None, add_len=None):
    """Transform a time series into a frequency-domain table.

    The series is zero-padded (to sharpen the frequency grid), FFT'd and
    normalized, then tabulated per frequency bin.

    Parameters
    ----------
    data : pd.DataFrame or pd.Series
        Input series; for a DataFrame only the first column is used.
    add_len_multiple : int, optional
        Pad with len(data) * multiple zeros; takes precedence over `add_len`.
    add_len : int, optional
        Explicit number of zeros to pad; defaults to 31 * len(data) when
        neither argument is given.

    Returns
    -------
    pd.DataFrame
        Output of find_h_l() on a table with columns
        ['Freq', 'FFT Abs', 'Duration', 'Phase'].
    """
    # `add_len_multiple` wins over an explicit `add_len`.
    if add_len_multiple is not None:
        add_len = len(data) * add_len_multiple
    elif add_len is None:
        add_len = len(data) * 31  # default padding multiple

    # Rescales amplitudes to undo the dilution introduced by zero-padding.
    amp_scale = add_len / len(data) + 1

    padded = add_zero(data, add_len) if add_len != 0 else data.iloc[:, 0]

    n = len(padded)
    spectrum = np.fft.fft(padded) / (n / 2)  # FFT with 2/N normalization
    abs_part = np.fft.fftshift(abs(spectrum)) * amp_scale
    # Phase in degrees from the real/imaginary parts of each bin.
    pha_part = np.fft.fftshift(np.degrees(np.arctan(-spectrum.real / spectrum.imag)))

    # Centered integer frequency axis matching fftshift's bin order.
    if n % 2 == 1:  # odd length
        freq_axis = np.linspace(-(n - 1) / 2, (n - 1) / 2, num=n)
    else:
        freq_axis = np.linspace(-n / 2, n / 2 - 1, num=n)

    # Period per bin; the zero-frequency bin maps to inf (by design).
    duration_axis = abs(n / freq_axis)

    table = pd.DataFrame([freq_axis.tolist(), abs_part.tolist(),
                          duration_axis.tolist(), pha_part.tolist()]).T
    table.columns = ['Freq', 'FFT Abs', 'Duration', 'Phase']

    return find_h_l(table)


def add_zero(data, add_len):
    """Zero-pad a series by `add_len` samples.

    For a DataFrame input the zeros are appended AFTER its first column; for
    any other input (assumed Series-like) they are prepended BEFORE it.
    Either way the result is a Series with a fresh 0..n-1 index.

    NOTE(review): the opposite padding side for the two input kinds is kept
    as-is; the amplitude spectrum downstream is side-agnostic.
    """
    zeros = pd.Series(np.zeros([1, add_len])[0, :])
    zeros.index = zeros.index - len(zeros.index)
    # Empirically, the padding-to-data length ratio scales plotted amplitudes
    # (padding as long as the data halves them), which the FFT normalization
    # in ts2fs compensates for via its amplitude multiple.

    if isinstance(data, pd.DataFrame):
        return pd.concat([data.iloc[:, 0], zeros], ignore_index=True)
    return pd.concat([zeros, data], ignore_index=True)


def log_est(data_freq):
    """Convert log10 amplitudes in the 'FFT Abs' column back to simple
    returns: 10 ** amplitude - 1."""
    return np.power(10, data_freq['FFT Abs']) - 1


def find_h_l(old_data_freq):
    """Annotate a spectrum table with peak/trough ('H/L') labels.

    Works on a deep copy of `old_data_freq` (expected columns: 'Freq',
    'FFT Abs', 'Duration', 'Phase') and appends an 'H/L' column comparing
    each amplitude with its neighbours.  Codes (sign gives direction):
    +/-1 trend continues, 0 flat between trends, +/-2 flat inside a
    top/bottom plateau, +/-3 plateau boundary, +/-4 peak/trough point.
    If the amplitude column contains any NaN, the labels are left NaN.
    """

    data_freq = copy.deepcopy(old_data_freq)

    data_freq['H/L'] = np.nan
    # data_freq.columns = ['Freq', 'FFT Abs', 'Duration', 'Phase', 'H/L']

    # Label codes: 1 = trend continues, 0 = flat between trends, 2 = flat
    # inside a top/bottom plateau, 3 = plateau boundary, 4 = peak/trough.
    if True in set(np.isnan(data_freq.iloc[:, 1]).values):
        pass
    else:
        # Per interior point: +/-2 per side from strict comparisons, +3 per
        # side where neighbours are exactly equal; summed into `pre`.
        left = ((data_freq.iloc[1:-1, 1].values > data_freq.iloc[0:-2, 1].values) * 1 - 0.5) * 4
        right = ((data_freq.iloc[1:-1, 1].values > data_freq.iloc[2:, 1].values) * 1 - 0.5) * 4
        left_equal = (data_freq.iloc[1:-1, 1].values == data_freq.iloc[0:-2, 1].values) * 3
        right_equal = (data_freq.iloc[1:-1, 1].values == data_freq.iloc[2:, 1].values) * 3
        pre = left + right + left_equal + right_equal

        # pre_test = pd.DataFrame(pre.T)
        # Direction of a continuing trend: +/-1 where left and right disagree.
        pre_trend = (left - right) / 4

        pre[pre == 0] = pre_trend[pre == 0]
        pre[pre == 2] = 0

        pre = pd.DataFrame(pre.T)

        # Rows labelled +/-3 mark plateau boundaries; points strictly between
        # two boundaries of the same sign become +/-2 (inside the plateau).
        pre_3_index_1 = (pre[pre[pre.columns[0]] == 3].index).tolist()
        pre_3_index_2 = (pre[pre[pre.columns[0]] == -3].index).tolist()
        pre_3_index_1.extend(pre_3_index_2)
        pre_3_index = list(set(pre_3_index_1))

        for i in range(0, len(pre_3_index) - 1):
            if pre_3_index[i] + 1 < pre_3_index[i + 1]:
                if pre.iloc[pre_3_index[i], 0] == pre.iloc[pre_3_index[i + 1], 0]:
                    pre.iloc[pre_3_index[i] + 1:pre_3_index[i + 1], 0] = 2 * np.sign(pre.iloc[pre_3_index[i], 0])

        data_freq.iloc[1:-1, -1] = pre.iloc[0:, 0].values

        # The two endpoints default to turning points.
        if data_freq.iloc[0, 1] > data_freq.iloc[1, 1]:
            data_freq.iloc[0, -1] = 4
        elif data_freq.iloc[0, 1] < data_freq.iloc[1, 1]:
            data_freq.iloc[0, -1] = -4
        else:
            if abs(data_freq.iloc[1, -1]) == 3:
                # NOTE(review): data_freq.iloc[1, -1] corresponds to
                # pre.iloc[0, 0] (pre covers rows 1..n-2), yet the sign is
                # read from pre.iloc[1, 0] — confirm the off-by-one is intended.
                data_freq.iloc[0, -1] = 2 * np.sign(pre.iloc[1, 0])
            else:  # abs(data_freq.iloc[1, -1]) == 2 or 0
                data_freq.iloc[0, -1] = data_freq.iloc[1, -1]

        if data_freq.iloc[-1, 1] > data_freq.iloc[-2, 1]:
            data_freq.iloc[-1, -1] = 4
        elif data_freq.iloc[-1, 1] < data_freq.iloc[-2, 1]:
            data_freq.iloc[-1, -1] = -4
        else:
            if abs(data_freq.iloc[-2, -1]) == 3:
                data_freq.iloc[-1, -1] = 2 * np.sign(pre.iloc[-2, 0])
            else:  # abs(data_freq.iloc[-2, -1]) == 2 or 0
                data_freq.iloc[-1, -1] = data_freq.iloc[-2, -1]

    return data_freq


def __choose_windows(name='Hanning', N=20, restore_pattern='a'):
    """Build an FFT window of length N, pre-scaled by a restore coefficient.

    Supported names: 'Rect', 'Hanning', 'Hamming'; any unrecognized name
    falls back to a rectangular window (same as 'Rect' — the original had
    two byte-identical branches for this, now merged).

    Parameters
    ----------
    name : str
        Window family.
    N : int
        Window length in samples.
    restore_pattern : str
        'a' applies the amplitude-correction coefficient (keeps amplitudes
        unchanged after windowing); anything else applies the
        power-correction coefficient (keeps energy unchanged).

    Returns
    -------
    np.ndarray
        The window samples multiplied by the chosen coefficient.
    """
    if name == 'Hamming':
        a_coeff = 1.852
        p_coeff = 1.586
        window = np.array([0.54 - 0.46 * np.cos(2 * np.pi * n / (N - 1)) for n in range(N)])

    elif name == 'Hanning':
        a_coeff = 2
        p_coeff = 1.633
        window = np.array([0.5 - 0.5 * np.cos(2 * np.pi * n / (N - 1)) for n in range(N)])

    else:
        # 'Rect' and any unknown name: rectangular window, no correction.
        a_coeff = 1
        p_coeff = 1
        window = np.ones(N)

    coeff = a_coeff if restore_pattern.lower() == 'a' else p_coeff
    return window * coeff


def fetch_freq_info(old_y, pattern='normal', allow_trend=False, windows_name=None, add_len_multiple=None, add_len=None):
    """Compute a series' frequency spectrum and locate its dominant cycle.

    Assumes `old_y` has already been ratio- or log-processed by the caller.
    The window (if any) is applied first; zero-padding happens afterwards
    inside ts2fs().

    Parameters
    ----------
    old_y : pd.DataFrame
        Single-column time series.
    pattern : str
        'ln' converts log10 amplitudes back via log_est(); anything else
        uses the raw 'FFT Abs' values.
    allow_trend : bool
        If True, the zero-frequency (trend) row may be selected as the core
        duration row.
    windows_name : str or None
        Window name forwarded to __choose_windows().
    add_len_multiple, add_len :
        Zero-padding controls forwarded to ts2fs().

    Returns
    -------
    (freq, core_duration, core_duration_info) :
        Full spectrum table; the dominant duration (scalar, NaN if not
        found); and the matching spectrum row(s) (an all-NaN row if none).
    """

    # Multiple of the largest finite duration used to stand in for an
    # infinite duration when computing the per-period ("unit") amplitude.
    inf_multiple = 2

    if windows_name is not None:
        windows = __choose_windows(windows_name, len(old_y))
        windows = pd.DataFrame(windows, columns=old_y.columns, index=old_y.index)
        y = old_y * windows
    else:
        y = copy.deepcopy(old_y)

    freq = ts2fs(y, add_len_multiple=add_len_multiple, add_len=add_len)  # build the spectrum of y

    # if pattern == 'ln':
    #     freq = ts2fs(np.log10(y))
    # else:
    #     freq = ts2fs(y)

    if pattern == 'ln':
        freq['Amplitude'] = log_est(freq)
    else:
        freq['Amplitude'] = freq['FFT Abs']

    # Amplitude per unit of period; inf durations use the proxy above.
    freq['Unit Amplitude'] = freq['Amplitude'].values / freq['Duration'].values
    freq.loc[freq['Duration'] == np.inf, 'Unit Amplitude'] = freq[freq['Duration'] == np.inf]['Amplitude'].values / (
            inf_multiple * np.max(freq.loc[freq['Duration'] != np.inf, 'Duration']))

    # Floating-point noise can make the +f/-f amplitude pair differ past the
    # decimal point, so select by frequency id rather than by amplitude value.
    freq_id_list = freq.loc[freq['FFT Abs'] == max(freq['FFT Abs']), 'Freq'].values.tolist()
    freq_id_unique = int(list(set(np.abs(freq_id_list)))[0])
    if freq_id_unique > max(freq['Freq']):
        freq_id_unique = freq_id_unique * -1

    # core_duration = freq.loc[freq['FFT Abs'] == max(freq['FFT Abs']), 'Duration']
    core_duration = freq.loc[freq['Freq'] == freq_id_unique, 'Duration']
    if core_duration.shape[0] == 0:
        core_duration = np.nan
    else:
        core_duration = core_duration.values[0]

    # Near the data edge freq_id_unique can end up negative.
    if allow_trend:
        # core_duration_info = freq.loc[(freq['Freq'] == freq_id_unique) & (freq['Freq'] >= 0), :]
        core_duration_info = freq.loc[(freq['Freq'] == freq_id_unique), :]
    else:
        # core_duration_info = freq.loc[(freq['Freq'] == freq_id_unique) & (freq['Freq'] > 0), :]
        core_duration_info = freq.loc[(freq['Freq'] == freq_id_unique) & (freq['Freq'] != 0), :]

    if core_duration_info.shape[0] == 0:
        # No matching row: return a single all-NaN row with the same columns.
        core_duration_info = pd.DataFrame(np.full([1, core_duration_info.shape[1]], np.nan), index=[0],
                                          columns=core_duration_info.columns)
        # core_duration_info['Duration'].iloc[-1] = np.inf

    return freq, core_duration, core_duration_info


def find_first_wave(freq_info, ts, threshold=0, start_id=1):
    """Extract the first band-limited wave from `ts` whose measured dominant
    duration actually falls inside its band.

    Scans lower band edges freq_info.iloc[start_id:], band-passing `ts`
    between freq_info.iloc[0, 0] (upper) and each candidate (lower).

    Returns
    -------
    (right_start_freq, cycle_data, z, lower_duration) on success;
    (right_start_freq, None, None, None) when no wave is found —
    right_start_freq is False if the measured duration hit the upper band
    edge exactly (i.e. the scan started from the wrong frequency).
    """

    found_wave = False
    right_start_freq = True
    cycle_data = pd.DataFrame()
    z = np.nan
    upper = freq_info.iloc[0, 0]
    for z in range(start_id, len(freq_info)):
        cycle_data = new_pass_filter(ts, upper_duration=upper, lower_duration=freq_info.iloc[z, 0])
        # Skip bands whose filtered signal never exceeds the noise threshold.
        if False in set(np.abs(cycle_data.iloc[:, 0].values) <= threshold):
            _, measured_duration, _ = fetch_freq_info(cycle_data, 'normal',
                                                      allow_trend=True,
                                                      windows_name='Hanning')
            if upper > measured_duration >= freq_info.iloc[z, 0]:
                found_wave = True
                break
            if upper == measured_duration:
                right_start_freq = False
                break

    if found_wave:
        return right_start_freq, cycle_data, z, freq_info.iloc[z, 0]
    return right_start_freq, None, None, None


def auto_freq_range_detect(freq_info, ts, threshold=10 ** -3):
    """Iteratively split `ts` into band-limited cycles with sensible band edges.

    Repeatedly calls find_first_wave() on the residual, subtracting each
    extracted wave; when a scan fails mid-way (wrong start frequency) it
    backtracks to the previous band and retries from there.

    Parameters
    ----------
    freq_info : pd.DataFrame
        Candidate durations, indexed by duration.
    ts : pd.DataFrame
        Single-column series to decompose.
    threshold : float
        Amplitude below which a filtered band counts as a zero line.

    Returns
    -------
    (split_data, split_data_sum, e_data, duration_range) :
        per-band cycles (one column per extracted wave), their sum, the
        unexplained residual, and a table of the detected band boundaries.
    """
    filtered_freq_info = copy.deepcopy(freq_info)
    # filtered_freq_info.columns = ['qty', 'amplitude', 'unit amplitude']
    filtered_freq_info = filtered_freq_info.sort_index(ascending=True)  # sort durations ascending

    duration_range = pd.DataFrame(
        columns=['main duration', 'last effective duration', 'upper duration', 'lower duration'])

    flag = 0
    e_data = copy.deepcopy(ts)
    old_e_data = copy.deepcopy(ts)
    split_data = pd.DataFrame()
    last_end_index = np.inf
    while flag < len(filtered_freq_info):
        print(str(flag))
        print('Duration: ' + str(filtered_freq_info.index[flag]))
        # print('Qty: ' + str(filtered_freq_info['qty'].iloc[flag]))
        # print('Amplitude: ' + str(filtered_freq_info['amplitude'].iloc[flag]))
        # print('Unit Amplitude : ' + str(filtered_freq_info['unit amplitude'].iloc[flag]))

        right_start_freq, cycle_data, flag_delta, end_index = find_first_wave(filtered_freq_info.iloc[flag:, :],
                                                                              e_data, threshold=threshold, start_id=1)

        if flag_delta is None:
            if right_start_freq:
                # Residual is flat everywhere: decomposition is finished.
                break
            else:
                # Wrong start frequency: undo the last band and rescan from
                # its upper edge against the pre-subtraction residual.
                duration_range_temp = duration_range.iloc[[-1], :]
                duration_range = duration_range.iloc[:-1, :]

                old_flag = flag
                flag = np.where(filtered_freq_info.index == duration_range_temp['upper duration'].iloc[0])[0][0]
                right_start_freq, cycle_data, flag_delta, end_index = find_first_wave(
                    filtered_freq_info.iloc[flag:, :],
                    old_e_data, threshold=threshold,
                    start_id=old_flag - flag + 1)
                main_index = end_index
                # main_index = OMP_freq_info_all.iloc[flag:flag + flag_delta + 1, :]['amplitude'].idxmax()
                # main_index = OMP_freq_info_all.iloc[flag:flag + flag_delta + 1, :]['unit amplitude'].idxmax()
                # old_e_data = copy.deepcopy(e_data)
                e_data = old_e_data - cycle_data
                # cycle_data.columns = [str(main_index)]
                cycle_data.columns = [main_index]
                # The rescanned wave replaces the previously-extracted column.
                split_data = pd.concat([split_data.iloc[:, :-1], cycle_data], axis=1)
                last_end_index = filtered_freq_info.index[flag]
                duration_range_temp = pd.DataFrame([main_index, end_index, last_end_index, end_index],
                                                   index=duration_range.columns).T
                last_end_index = end_index
                duration_range = pd.concat([duration_range, duration_range_temp])
                flag = flag + flag_delta
        else:
            # Normal path: record the wave and subtract it from the residual.
            main_index = end_index
            # main_index = OMP_freq_info_all.iloc[flag:flag + flag_delta + 1, :]['amplitude'].idxmax()
            # main_index = OMP_freq_info_all.iloc[flag:flag + flag_delta + 1, :]['unit amplitude'].idxmax()
            old_e_data = copy.deepcopy(e_data)
            e_data = e_data - cycle_data
            # cycle_data.columns = [str(main_index)]
            cycle_data.columns = [main_index]
            split_data = pd.concat([split_data, cycle_data], axis=1)
            duration_range_temp = pd.DataFrame([main_index, end_index, last_end_index, end_index],
                                               index=duration_range.columns).T
            last_end_index = end_index
            duration_range = pd.concat([duration_range, duration_range_temp])
            flag = flag + flag_delta

    # Sanity check: the extracted cycles plus the residual must reproduce ts.
    split_data_sum = pd.DataFrame(np.sum(split_data, axis=1), columns=e_data.columns)
    delta = ts - (pd.DataFrame(np.sum(split_data, axis=1), columns=e_data.columns) + e_data)
    is_same = not (False in set((np.abs(delta[delta.columns[0]]) < 0.1 ** 6).tolist()))
    print('分解结果相加是否和原始数据相同: ' + str(is_same))

    duration_range.reset_index(drop=True, inplace=True)
    # duration_range.reset_index(drop=True, inplace=True)
    split_data_sum.columns = ['main_cycle_sum']
    e_data.columns = ['residual']
    return split_data, split_data_sum, e_data, duration_range


def get_index_price(secucodes):
    """Fetch index close prices resampled to month-end observations.

    Note: the returned data is monthly frequency — the last record of each
    calendar month, per secucode.

    Returns
    -------
    (pd.DataFrame, dict) :
        Prices indexed by (secucode, enddate), and a {secucode: secuabbr}
        lookup.
    """

    raw = fetch_index_price(secucodes)
    abbr_map = raw[['secucode', 'secuabbr']].set_index('secucode').to_dict().get('secuabbr')
    raw = raw.drop('secuabbr', axis=1)

    raw['enddate'] = pd.to_datetime(raw['enddate'])
    # For each secucode, keep only the last row in every calendar month.
    monthly = raw.groupby('secucode').apply(
        lambda g: g.groupby(g['enddate'].dt.to_period('M')).last())
    monthly = monthly.reset_index(drop=True)
    monthly.set_index(['secucode', 'enddate'], inplace=True)
    return monthly, abbr_map


def get_freq_range():
    """Load per-asset cycle durations from findfit_stock.major_assets_cycle.

    Returns
    -------
    pd.Series
        Numeric cycle values indexed by (secucode, cycle_type); rows whose
        value cannot be parsed as a number are dropped.
    """
    sql = '''
       select cycle_type,secucode,secuabbr,cycle_value from findfit_stock.major_assets_cycle 
    '''
    frame = pd.read_sql_query(sql, conn)
    frame.set_index(["secucode", 'cycle_type'], inplace=True)
    frame['cycle_value'] = pd.to_numeric(frame['cycle_value'], errors='coerce')
    return frame['cycle_value'].dropna()


def expand_index_cycle(data, freq_info):
    """Decompose one index's price series into trend, cycles, and residual.

    Parameters
    ----------
    data : pd.DataFrame
        Single-column price series.
    freq_info : pd.DataFrame
        Candidate cycle durations for this asset, indexed by duration.

    Returns
    -------
    pd.DataFrame
        detrend() output concatenated with the per-duration cycle columns
        (each named by its rounded dominant duration), their sum, and the
        residual.
    """
    detrended = detrend(data)
    cycle = np.log10(pd.DataFrame(detrended['original_y'] / detrended['trend'].values))
    cycle.columns = ['cycle_data']
    split_data, split_sum, residual, _duration_range = auto_freq_range_detect(freq_info, cycle,
                                                                              threshold=10 ** -3)

    # Name each extracted cycle column after its rounded dominant duration.
    split_data.columns = [
        str(int(np.round(fetch_freq_info(split_data.iloc[:, i])[1], 0)))
        for i in range(split_data.shape[1])
    ]
    return pd.concat([detrended, split_data, split_sum, residual], axis=1)


def expand_cycle_data(secucodes):
    """Run the full cycle decomposition for every requested secucode.

    Returns
    -------
    (dict, dict) :
        {secucode: decomposition DataFrame} and {secucode: secuabbr}.
    """
    close_price, secuabbr_dict = get_index_price(secucodes)  # monthly closes
    freq_range = get_freq_range()  # per-asset decomposition durations
    codes = close_price.index.get_level_values('secucode').unique().to_list()

    result = {}
    for code in codes:
        result[code] = expand_index_cycle(close_price.loc[code],
                                          freq_range.loc[code].to_frame())
    return result, secuabbr_dict


def gen_list(data):
    """Turn a date-indexed series into chart points:
    [{'key': 'YYYY-MM-DD', 'value': v}, ...]."""
    return [{'key': stamp.strftime('%Y-%m-%d'), 'value': val}
            for stamp, val in data.items()]


def resolve_data(data):
    """Convert {secucode: DataFrame} into a chart-ready list of axis specs.

    Bug fix: the axis dict used to be created once outside the loop and
    mutated/appended each iteration, so every entry of the returned list
    aliased the same object — all entries showed the last secucode, and the
    yaxis series accumulated across codes.  A fresh dict is now built per
    secucode.

    Returns
    -------
    dict
        {'res': [ {axisTitle, axis: {xaxis, yaxis}}, ... ]}, one entry per
        secucode; column labels are localized via `label_map` when known.
    """
    label_map = {
        'cycle': '周期数据',
        'trend': '趋势数据',
        'original_y': '原始数据',
    }
    axisList = []
    for secucode in data:
        res = {
            'axisTitle': secucode,
            'axis': {
                'xaxis': data[secucode].index.astype(str),
                'yaxis': []
            }
        }
        for col in data[secucode].columns:
            res['axis']['yaxis'].append({
                'label': label_map[col] if col in label_map else col,
                'value': gen_list(data[secucode][col])
            })
        axisList.append(res)
    return {
        'res': axisList
    }

def resolve_result(data, secuabbr_dict):
    """Re-key {secucode: DataFrame} by security abbreviation.

    Each DataFrame's index is converted in place from datetimes to
    'YYYY-MM-DD' strings.
    """
    renamed = {}
    for code, frame in data.items():
        frame.index = frame.index.strftime('%Y-%m-%d')
        renamed[secuabbr_dict[code]] = frame
    return renamed


def chart_data():
    """Decompose the default benchmark set and key the results by security
    abbreviation."""
    decomposed, abbr_map = expand_cycle_data(["000906.SH", "518880.SH"])
    return resolve_result(decomposed, abbr_map)

# NOTE(review): this runs the full DB-backed pipeline at import time;
# consider guarding it with `if __name__ == '__main__':`.
chart_data()
# if __name__ == '__main__':
#     result, secuabbr_dict = expand_cycle_data(["000906.SH","518880.SH"])
#     chart_data(result, secuabbr_dict)
# if __name__ == '__main__':
#     # base_folder = 'D://BaiduNetdiskDownload/慧度/报告/'
#
#     freq = expand_cycle_data(['hello'])
#
#     base_folder = '/Users/lishuang/Desktop/codes/python_project/ffquant/data/cycle/'
#     import_filename = '大类资产分频数据.xlsx'
#     asset_df = pd.read_excel(base_folder + import_filename, sheet_name='BenchID', index_col=0)
#     range_df = pd.read_excel(base_folder + import_filename, sheet_name='Freq_Range', index_col=0)
#     export_filename = '报告数据.xlsx'
#
#     res_list = []
#
#     for asset_id in range(len(asset_df)):
#         benchid = asset_df['BenchID'].iloc[asset_id]
#         name = asset_df['名称'].iloc[asset_id]
#         start_date = asset_df['起始时间'].iloc[asset_id]
#         freq_info = range_df[[name]].dropna()
#         freq_info = freq_info.set_index(keys=[name], drop=False)
#
#         # 获取数据代码
#         # data = pd.DataFrame()
#         data = pd.read_excel(base_folder + '000906.SH_close_M_2004-12-01_2024-06-30.xlsx', sheet_name='export_result',
#                              index_col=0)
#         data = data[data.index >= start_date]
#
#         # 去趋势
#         detrended_data = detrend(data)
#         cycle_data = np.log10(pd.DataFrame(detrended_data['original_y'] / detrended_data['trend'].values))
#         cycle_data.columns = ['cycle_data']
#         split_data, split_data_sum, e_data, duration_range = auto_freq_range_detect(freq_info, cycle_data,
#                                                                                     threshold=10 ** -3)
#
#         # right_start_freq, splited_cycle_data, _, _ = find_first_wave(freq_info, cycle_data, threshold=10 ** -3,
#         #                                                              start_id=1)
#
#         col_list = []
#         for i in range(split_data.shape[1]):
#             col_list.append(str(int(np.round(fetch_freq_info(split_data.iloc[:, i])[1], 0))))
#         split_data.columns = col_list
#         res = pd.concat([detrended_data, split_data, split_data_sum, e_data], axis=1)
#
#         res_list.append(res)
#
#     # 保存数据
#     with pd.ExcelWriter(base_folder + export_filename) as writer:
#         for asset_id in range(len(asset_df)):
#             res_list[asset_id].to_excel(writer, sheet_name=asset_df['名称'].iloc[asset_id])
#
#     print('hello world')
