import pandas as pd

from app_config import get_engine, get_pro
import os


class O000688KC50(object):
    """Calculate the volatility of the last one year"""
    one_year_ago: str = None
    zero_year_ago: str = None
    """file output path"""
    folder_path: str = None
    """ for merge"""
    date_000688_kc50: str = None
    """SSE Science and Technology Innovation Board Composite Index(综合指数)"""
    date_000680_kcbz: str = None


def create_folder(folder_path):
    """Create *folder_path* (including parent directories) if it is missing.

    Prints a confirmation message only when the folder did not exist before
    this call.
    """
    if not os.path.exists(folder_path):
        # exist_ok=True closes the race window between the existence check
        # above and the actual directory creation.
        os.makedirs(folder_path, exist_ok=True)
        print(f"文件夹 '{folder_path}' 创建成功")


def calc_kc50(o_kc50: O000688KC50 | None = None):
    one_year_ago = o_kc50.one_year_ago
    zero_year_ago = o_kc50.zero_year_ago
    folder_path = o_kc50.folder_path
    date_000688_kc50 = o_kc50.date_000688_kc50
    date_000680_kcbz = o_kc50.date_000680_kcbz
    print(f"""
      folder_path: {"     " + folder_path}
      one_year_ago: {"    " + one_year_ago}
      zero_year_ago: {"   " + zero_year_ago}
      date_000688_kc50: {"" + date_000688_kc50}
      date_000680_kcbz: {"" + date_000680_kcbz}
      """)

    create_folder(folder_path)
    engine = get_engine()

    """ 
         1、sample space: same with <SSE Science and Technology Innovation Board Composite Index 000680>
     """
    df_sampleSpace = get_pro().index_weight(index_code='000680.SH', start_date=date_000680_kcbz,
                                            end_date=date_000680_kcbz)
    list_ts_code = df_sampleSpace['con_code'].tolist()
    df_sampleSpace.rename(columns={'con_code': 'ts_code'}, inplace=True)
    df_sampleSpace.to_excel(folder_path + "/1_sampleSpace.xlsx")

    sql_dail = f"""
    SELECT ts_code,trade_date, amount FROM `daily`
    WHERE ts_code IN ({','.join(f"'{code}'" for code in list_ts_code)})
    AND trade_date >= '{one_year_ago}'
    AND trade_date <= '{zero_year_ago}'
    """

    df_daily = pd.read_sql_query(sql_dail, engine)
    df_daily['amount'] = df_daily['amount'] / 1000
    df_avgAmount = df_daily.groupby('ts_code')['amount'].mean().reset_index()
    df_avgAmount.columns = ['ts_code', '日均成交额']
    df_sampleSpace_avgAmount = pd.merge(df_sampleSpace, df_avgAmount, on='ts_code', how='left')

    # 构建查询语句
    sql_dail_basic = f"""
        SELECT ts_code,trade_date, total_mv FROM `daily_basic`
        WHERE ts_code IN ({','.join(f"'{code}'" for code in list_ts_code)})
        AND trade_date >= '{one_year_ago}'
        AND trade_date <= '{zero_year_ago}'
        """
    df_daily_basic = pd.read_sql_query(sql_dail_basic, engine)
    df_daily_basic['total_mv'] = df_daily_basic['total_mv'] / 10000
    df_avgTotalMv = df_daily_basic.groupby('ts_code')['total_mv'].mean().reset_index()
    df_avgTotalMv.columns = ['ts_code', '日均总市值']
    df_sampleSpace_avgAmount_avgTotalMv = pd.merge(df_sampleSpace_avgAmount, df_avgTotalMv, on='ts_code', how='left')

    # 按 日均成交额 列降序排序
    df_sampleSpace_avgAmount_avgTotalMv_sorted = df_sampleSpace_avgAmount_avgTotalMv.sort_values(by='日均成交额', ascending=False).reset_index(drop=True)
    df_sampleSpace_avgAmount_avgTotalMv_sorted['i_avgAmount'] = df_sampleSpace_avgAmount_avgTotalMv_sorted.index
    # 获取前 90% 的数据
    num_rows = len(df_sampleSpace_avgAmount_avgTotalMv_sorted)
    df_sampleSpace_avgAmountTop90P_avgTotalMv = df_sampleSpace_avgAmount_avgTotalMv_sorted.head(int(num_rows * 0.9))

    df_sampleSpace_avgAmountTop90P_avgTotalMv = df_sampleSpace_avgAmountTop90P_avgTotalMv.sort_values('日均总市值', ascending=False).reset_index(drop=True)
    df_sampleSpace_avgAmountTop90P_avgTotalMv['i_totalMv'] = df_sampleSpace_avgAmountTop90P_avgTotalMv.index
    df_sampleSpace_avgAmountT90p_avgTotalMvT120 = df_sampleSpace_avgAmountTop90P_avgTotalMv.head(120)

    """@@@@@@@@@@@@@@@@ end @@@@@@@@@@@@@@@@@"""

    df_930955 = get_pro().index_weight(index_code='000688.SH', start_date=date_000688_kc50, end_date=date_000688_kc50)
    df_930955.rename(columns={'con_code': 'ts_code'}, inplace=True)
    df_930955.rename(columns={'trade_date': 'list_date'}, inplace=True)
    df_930955['symbol'] = df_930955['ts_code'].str.split('.').str[0]
    df_930955.drop(columns=['weight'], inplace=True)
    df_930955.to_excel(folder_path + "/" + date_000688_kc50 + '-000688.xlsx')

    # 获取 dataframe1 中的所有列
    columns_dataframe1 = df_sampleSpace_avgAmountT90p_avgTotalMvT120.columns

    # 创建缺失列并填充为 '*'
    for column in columns_dataframe1:
        if column not in df_930955.columns:
            df_930955[column] = '**'

    # 将 dataframe2 的列顺序调整为与 dataframe1 一致
    df_930955 = df_930955[columns_dataframe1]

    # 追加 dataframe2 到 dataframe1
    result = pd.concat([df_sampleSpace_avgAmountT90p_avgTotalMvT120, df_930955], ignore_index=True)

    result.to_excel(folder_path + '_.xlsx')


if __name__ == '__main__':
    # One parameter object per (year, quarter) run. Only the uncommented
    # calc_kc50(...) call(s) at the bottom are executed.
    _2025_1one = O000688KC50()
    _2024_1one = O000688KC50()
    _2023_1one = O000688KC50()
    _2022_1one = O000688KC50()

    _2024_3thr = O000688KC50()
    _2023_3thr = O000688KC50()
    _2022_3thr = O000688KC50()
    _2021_3thr = O000688KC50()

    _2024_4fou = O000688KC50()
    _2023_4fou = O000688KC50()
    _2022_4fou = O000688KC50()
    _2021_4fou = O000688KC50()

    _2025_2two = O000688KC50()
    _2024_2two = O000688KC50()
    _2023_2two = O000688KC50()
    _2022_2two = O000688KC50()
    _2021_2two = O000688KC50()

    # -------- Q4 (四季度) --------
    _2024_4fou.one_year_ago = '20231101'
    _2023_4fou.one_year_ago = '20221101'
    _2022_4fou.one_year_ago = '20211101'

    _2024_4fou.zero_year_ago = '20241031'
    _2023_4fou.zero_year_ago = '20231031'
    _2022_4fou.zero_year_ago = '20221031'

    _2024_4fou.date_000688_kc50 = '20240930'
    # NOTE(review): the 2023/2022 snapshot dates below fall in the following
    # year — looks intentional (review date?) but worth confirming.
    _2023_4fou.date_000688_kc50 = '20240131'
    _2022_4fou.date_000688_kc50 = '20230131'

    # -------- Q3 (三季度) --------
    _2024_3thr.one_year_ago = '20230801'
    _2023_3thr.one_year_ago = '20220801'
    _2022_3thr.one_year_ago = '20210801'

    _2024_3thr.zero_year_ago = '20240731'
    _2023_3thr.zero_year_ago = '20230731'
    _2022_3thr.zero_year_ago = '20220731'

    _2024_3thr.date_000688_kc50 = '20240628'
    _2023_3thr.date_000688_kc50 = '20231031'
    _2022_3thr.date_000688_kc50 = '20221031'

    # -------- Q2 (二季度) --------
    _2025_2two.one_year_ago = '20240501'
    _2024_2two.one_year_ago = '20230501'
    _2023_2two.one_year_ago = '20220501'  # fixed: was '2022501' (7 digits, missing a zero)

    _2025_2two.zero_year_ago = '20250430'
    _2024_2two.zero_year_ago = '20240430'
    _2023_2two.zero_year_ago = '20230430'

    _2025_2two.date_000688_kc50 = '20250430'
    _2024_2two.date_000688_kc50 = '20240628'
    _2023_2two.date_000688_kc50 = '20230731'

    # -------- Q1 (一季度) --------
    _2025_1one.one_year_ago = '20240201'
    _2024_1one.one_year_ago = '20230201'
    _2023_1one.one_year_ago = '20220201'

    _2025_1one.zero_year_ago = '20250131'
    _2024_1one.zero_year_ago = '20240131'
    _2023_1one.zero_year_ago = '20230131'

    _2025_1one.date_000688_kc50 = '20250127'
    _2024_1one.date_000688_kc50 = '20240430'
    _2023_1one.date_000688_kc50 = '20230428'

    # NOTE(review): date_000680_kcbz is only set for the two runs below;
    # calc_kc50 on any other object will fail on the None field.
    _2025_1one.date_000680_kcbz = '20250127'
    _2025_2two.date_000680_kcbz = '20250430'

    _2024_4fou.folder_path = 'zfile/kc50_2024season4/'
    _2023_4fou.folder_path = 'zfile/kc50_2023season4/'
    _2022_4fou.folder_path = 'zfile/kc50_2022season4/'
    _2024_3thr.folder_path = 'zfile/kc50_2024season3/'
    _2023_3thr.folder_path = 'zfile/kc50_2023season3/'
    _2022_3thr.folder_path = 'zfile/kc50_2022season3/'
    _2025_2two.folder_path = 'zfile/kc50_2025season2/'
    _2023_2two.folder_path = 'zfile/kc50_2023season2/'
    _2025_1one.folder_path = 'zfile/kc50_2025season1/'
    _2024_1one.folder_path = 'zfile/kc50_2024season1/'
    _2023_1one.folder_path = 'zfile/kc50_2023season1/'

    calc_kc50(_2025_2two)

    # Other runs — uncomment to execute (configure date_000680_kcbz first):
    # calc_kc50(_2024_1one)
    # calc_kc50(_2025_1one)
    # calc_kc50(_2023_1one)
    # calc_kc50(_2022_1one)

    # calc_kc50(_2024_3thr)
    # calc_kc50(_2023_3thr)
    # calc_kc50(_2022_3thr)
    # calc_kc50(_2021_3thr)

    # calc_kc50(_2024_4fou)
    # calc_kc50(_2023_4fou)
    # calc_kc50(_2022_4fou)
    # calc_kc50(_2021_4fou)