# coding:utf-8
import os
import pandas as pd
import time
from rqalpha.apis import *
from rqalpha.utils.concurrent import ProgressedProcessPoolExecutor
from collections import OrderedDict
from rqalpha.my_strategys import my_first_strategy_file12 as ms
from rqalpha.my_factors.reform_data import symbol_mapping, get_file_name_li, get_file_name_li_from_merge_data_path
from rqalpha import run_func
from rqalpha.my_factors.reform_data import read_signal


def get_all_symbols():
    """Return the full stock universe from ./stocks.csv.

    The file is GBK-encoded and must contain a 'wind_code' column with
    Wind-style symbols (e.g. '000001.SZ').

    Returns:
        list[str]: all symbols, in file order.
    """
    # NOTE(review): path and column name are hard-coded; earlier variants
    # (u20220426.csv / 's_con_windcode' / slicing) were removed as dead code.
    df = pd.read_csv('./stocks.csv', encoding='gbk')
    return df['wind_code'].values.tolist()


def vertical_sub_run(date_split_li, select_type, _symbol, my_defines, data_path, merge_data_path):
    """Run the single-symbol ('vertical') backtest, one ms.run_bt per window.

    Maps the raw symbol to its rqalpha order-book id via ``symbol_mapping``,
    then iterates the (start, end) pairs in ``date_split_li``, wiring file
    lists and dates into ``my_defines['config.base']`` before each run.
    """
    base_cfg = my_defines['config.base']
    mapped_id = _symbol
    print('order book id:', _symbol)
    for suffix, replacement in symbol_mapping.items():
        if suffix in _symbol:
            mapped_id = _symbol.replace(suffix, replacement)
            break
    print('order book id mapping:', mapped_id)
    base_cfg.update({'ori_symbol': _symbol})
    # d_currency runs are pinned to a fixed instrument id.
    if select_type == 'd_currency':
        mapped_id = '000001.XSHE'
    base_cfg.update({'symbol': mapped_id})
    for window in date_split_li:
        window_start, window_end = window[0], window[1]
        if merge_data_path is None:
            source_file_li, trans_file_li = get_file_name_li(
                select_type, window_start, window_end, data_path, _symbol)
            # Source/transformed file lists must pair up one-to-one.
            if len(source_file_li) != len(trans_file_li):
                print('skip --------------')
                continue
            if not source_file_li:
                print(' no data, skip ------------')
                continue
            base_cfg.update({'source_file_li': source_file_li,
                             'trans_file_li': trans_file_li,
                             'merge_file_li': None})
        else:
            merge_file_li = get_file_name_li_from_merge_data_path(
                select_type, window_start, window_end, merge_data_path, mapped_id)
            base_cfg.update({'merge_file_li': merge_file_li})
        base_cfg.update({'start_date': window_start, 'end_date': window_end})
        ms.run_bt(config=ms.config, my_defines=my_defines)


def horizon_sub_run(start_date, end_date, my_defines, all_symbols, merge_data_path):
    """Run the all-symbols ('horizon') backtest as a single ms.run_bt call.

    Builds the symbol -> order-book-id mapping for every symbol, seeds the
    shared state in ``my_defines`` (queues, flags, factor signals), and then
    launches one backtest covering [start_date, end_date].
    """
    base_cfg = my_defines['config.base']
    base_cfg.update({'all_symbols': all_symbols})
    # Keep both a positional list and a raw->mapped dict; a dict alone would
    # deduplicate repeated symbols and change the list contents.
    mapped_ids = []
    mapping_dict = {}
    for raw in all_symbols:
        mapped = raw
        for suffix, replacement in symbol_mapping.items():
            if suffix in raw:
                mapped = raw.replace(suffix, replacement)
                break
        mapped_ids.append(mapped)
        mapping_dict[raw] = mapped
    base_cfg.update({'all_symbols_mapping': mapped_ids,
                     'all_symbols_mapping_dict': mapping_dict,
                     '_start': start_date,
                     '_end': end_date,
                     'merge_data_path': merge_data_path})
    for queue_name in ('queue1', 'queue2', 'queue3', 'queue4', 'queue5'):
        my_defines[queue_name] = {}
    my_defines['is_first_trade'] = True
    my_defines['all_queue'] = []
    my_defines['factor_df'] = read_signal(mapping_dict)
    # queue1 was just reset above, so this is always [] — kept to match the
    # per-day loop this single-shot call replaced.
    my_defines['last_li'] = list(my_defines['queue1'].keys())
    base_cfg.update({'start_date': start_date, 'end_date': end_date})
    ms.run_bt(config=ms.config, my_defines=my_defines)


def clear_incremental_result_file(run_method, increment_file_path):
    """Delete the incremental-result directory before a 'horizon' run.

    Args:
        run_method: only 'horizon' keeps incremental results; any other
            value is a no-op.
        increment_file_path: directory to remove recursively; may be None
            (no-op).
    """
    if run_method != 'horizon' or increment_file_path is None:
        # Bug fix: os.path.exists(None) raises TypeError, and _run passes
        # increment_file_path=None by default.
        return
    import shutil
    # ignore_errors removes the exists()/rmtree() race and tolerates a
    # directory that is already gone.
    shutil.rmtree(increment_file_path, ignore_errors=True)  # 递归删除文件夹


def _run(select_type, start_date, end_date, data_path, strategy_file, sys_freq, symbols=None,
         run_method='vertical', merge_data_path=None, increment_file_path=None):
    """Backtest entry point.

    Splits [start_date, end_date] into calendar weeks, then dispatches the
    backtest either per-symbol in parallel ('vertical') or across all
    symbols in a single run ('horizon').

    Args:
        select_type: instrument class tag, e.g. 'stock' or 'd_currency'.
        start_date / end_date: 'YYYY-MM-DD' bounds, inclusive.
        data_path: root of raw tick data.
        strategy_file: path to the strategy module file passed to rqalpha.
        sys_freq: bar frequency string, e.g. '60000ms'.
        symbols: explicit symbol list; None loads the universe from
            get_all_symbols().
        run_method: 'vertical' or 'horizon'; anything else is a no-op.
        merge_data_path: pre-merged data root; None means use raw + trans
            file lists instead.
        increment_file_path: directory cleared before a 'horizon' run.
    """
    my_defines = {'config.base': {'source_data_path': data_path, 'strategy_file': strategy_file,
                                  'sys_freq': sys_freq,
                                  'select_type': select_type,
                                  'run_method': run_method
                                  }}
    # Week splitting: dayofweek drops (e.g. Sun=6 -> Mon=0) exactly when a
    # new week begins, so the < comparison against the shifted series marks
    # week starts, and cumsum assigns one integer label per calendar week.
    date_li = pd.date_range(start_date, end_date, freq='d')
    df_date = pd.DataFrame(data=date_li, columns=['date'])
    df_date['day_of_week'] = df_date['date'].dt.dayofweek
    df_date['mask'] = df_date['day_of_week'] < df_date['day_of_week'].shift(1).fillna(df_date['day_of_week'])
    df_date['week_split'] = df_date['mask'].cumsum()
    df_date['date'] = pd.to_datetime(df_date['date']).dt.strftime('%Y-%m-%d')
    # One (first-day, last-day) string pair per week.
    # NOTE(review): as_index=False combined with a list agg is
    # pandas-version-sensitive; the reset_index below normalizes the result.
    df = df_date.groupby(['week_split'], as_index=False)['date'].agg(['min', 'max'])
    df = df.reset_index(drop=True)
    date_split_li = df.values.tolist()
    print(date_split_li)
    if symbols is None:
        all_symbols = get_all_symbols()
    else:
        all_symbols = symbols
    # Horizon runs start from a clean incremental-result directory.
    clear_incremental_result_file(run_method, increment_file_path)
    if run_method == 'vertical':
        # One worker process per symbol, each iterating the weekly windows.
        with ProgressedProcessPoolExecutor(max_workers=8) as executor:
            for _symbol in all_symbols:
                executor.submit(vertical_sub_run, date_split_li=date_split_li, select_type=select_type, _symbol=_symbol,
                                my_defines=my_defines, data_path=data_path, merge_data_path=merge_data_path)
        # for _symbol in all_symbols:
        #     vertical_sub_run(date_split_li=date_split_li, select_type=select_type, _symbol=_symbol,
        #                      my_defines=my_defines, data_path=data_path, merge_data_path=merge_data_path)
    elif run_method == 'horizon':
        # Single in-process run over the whole date range and symbol set.
        horizon_sub_run(start_date=start_date, end_date=end_date,
                        my_defines=my_defines, all_symbols=all_symbols, merge_data_path=merge_data_path)
    else:
        pass


if __name__ == "__main__":
    import time
    t0 = time.time()
    _run('stock',
         # '2022-01-07',
         '2021-01-06',
         '2022-04-07',
         # r'C:\Users\huajia\Desktop\data',
         r'D:\BaiduNetdiskDownload\tick2',
         r'C:\Users\huajia\Desktop\rqalpha4\rqalpha\my_strategys\my_first_strategy_file12.py',
         '60000ms',
         # symbols=['688689.SH'],
         # symbols=["688071.SH", "688386.SH"],
         # symbols=["000028.SZ"],
         # symbols=["600415.SH"],
         # symbols=['000001.SZ', '000002.SZ'],
         # run_method='vertical',
         run_method='horizon',
         # ['002004.SZ'],
         # ['002468.SZ']
         merge_data_path=r'D:\BaiduNetdiskDownload\tick3',
         increment_file_path=r'C:\Users\huajia\Desktop\rqalpha4\rqalpha\incremental_result\persist'
         # merge_data_path=None
         )
    # _run('d_currency', '2022-02-26', '2022-02-26', r'C:\Users\huajia\Desktop\data',
    #      r'C:\Users\huajia\Desktop\rqalpha3\rqalpha\my_strategys\my_first_strategy_file2.py',
    #      '500ms', ["LUNA-USDT"])
    print(time.time() - t0)



