import os
import pandas as pd
import time
from rqalpha.my_factors.reform_data import symbol_mapping, get_file_name_li
from rqalpha.my_factors.reform_data import get_source_tick_data
from rqalpha.utils.concurrent import ProgressedProcessPoolExecutor
from concurrent.futures import ThreadPoolExecutor, as_completed
import multiprocessing


def sub_run(select_type, data_path, the_date, _symbol, sys_freq, save_base_path):
    """Convert one symbol's raw tick files for a single day and save as CSV.

    Maps the raw symbol to an order-book id via ``symbol_mapping``, loads the
    matched source/transaction file lists, and writes the resampled tick data
    to ``save_base_path/the_date/<order_book_id>.csv``.  Skips (with a printed
    notice) when the file lists are mismatched or empty.
    """
    # First matching key in symbol_mapping wins, mirroring the lookup order.
    order_book_id = next(
        (_symbol.replace(k, v) for k, v in symbol_mapping.items() if k in _symbol),
        _symbol,
    )
    source_file_li, trans_file_li = get_file_name_li(
        select_type, the_date, the_date, data_path, _symbol
    )
    n_source, n_trans = len(source_file_li), len(trans_file_li)
    if n_source != n_trans:
        print(order_book_id, 'skip --------------')
        return
    if n_source == 0:
        # Lists are equal in length here, so both are empty.
        print(order_book_id, ' no data, skip ------------')
        return
    ticks_data = get_source_tick_data(
        select_type, source_file_li, trans_file_li, order_book_id, sys_freq
    )
    out_path = os.path.join(save_base_path, the_date, order_book_id + '.csv')
    ticks_data.to_csv(out_path, index=False)


def run(all_symbols, select_type, data_path, sys_freq, the_date, save_base_path):
    """Fan out per-symbol conversion for one trading day across a process pool.

    Submits one ``sub_run`` job per symbol to a ``ProgressedProcessPoolExecutor``;
    the context manager blocks until every submitted job has finished.
    """
    with ProgressedProcessPoolExecutor(max_workers=8) as executor:
        for sym in all_symbols:
            executor.submit(
                sub_run,
                select_type=select_type,
                data_path=data_path,
                the_date=the_date,
                _symbol=sym,
                sys_freq=sys_freq,
                save_base_path=save_base_path,
            )


def main_run(select_type, start_date, end_date, data_path, sys_freq, save_base_path,
             symbols=None):
    """Convert tick data for every symbol over a date range, one day at a time.

    Parameters
    ----------
    select_type : passed through to ``run``/``sub_run`` (e.g. 'stock').
    start_date, end_date : inclusive date range, any ``pd.date_range`` format.
    data_path : root directory of the raw tick data.
    sys_freq : resample frequency string (e.g. '60000ms').
    save_base_path : output root; one sub-directory is created per day.
    symbols : optional explicit symbol list.  When None, the universe is read
        from ``./stocks.csv`` ('wind_code' column, GBK-encoded).
    """
    date_li = pd.date_range(start_date, end_date, freq='d').strftime('%Y-%m-%d')
    if symbols is None:
        df = pd.read_csv('./stocks.csv', encoding='gbk')
        symbols = df['wind_code'].values.tolist()
    for the_date in date_li:
        # exist_ok avoids the racy exists()/makedirs() pair of the original
        # (another process could create the directory between the two calls).
        os.makedirs(os.path.join(save_base_path, the_date), exist_ok=True)
        run(all_symbols=symbols, select_type=select_type, data_path=data_path,
            sys_freq=sys_freq, the_date=the_date, save_base_path=save_base_path)


def single_load(file_path):
    """Read a single CSV file into a DataFrame (worker for ``pool.map``)."""
    return pd.read_csv(file_path)


def merge_all_symbols_to_one_day_csv(start_date, end_date, save_base_path, all_symbols_merge_path):
    """Concatenate each day's per-symbol CSVs into one day-level CSV.

    For every date in [start_date, end_date], loads all files under
    ``save_base_path/<date>/`` in parallel and writes the concatenation to
    ``all_symbols_merge_path/<date>.csv``.

    Fixes over the original:
    - ``pd.date_range(freq='d')`` includes weekends/holidays; the original
      crashed with FileNotFoundError on any date without a day folder — those
      dates are now skipped.
    - The worker pool is now always closed and joined (``finally``), even if
      one day's merge raises.
    """
    date_li = pd.date_range(start_date, end_date, freq='d').strftime('%Y-%m-%d')
    pool = multiprocessing.Pool(6)
    try:
        for the_date in date_li:
            print(the_date)
            save_day_path = os.path.join(save_base_path, the_date)
            if not os.path.isdir(save_day_path):
                # No data was produced for this date (e.g. non-trading day).
                continue
            file_name_li = [os.path.join(save_day_path, name)
                            for name in os.listdir(save_day_path)]
            res = pool.map(single_load, file_name_li)
            if res:
                res_df = pd.concat(res, ignore_index=True)
                print('total length:', len(res_df))
                res_df.to_csv(os.path.join(all_symbols_merge_path, the_date + '.csv'),
                              index=False)
    finally:
        pool.close()
        pool.join()


if __name__ == "__main__":
    # Stage 1 configuration: raw tick -> per-symbol resampled CSVs.
    # (main_run is currently disabled; only the merge stage below runs.)
    select_type = 'stock'
    sys_freq = '60000ms'
    start_date = '2022-02-11'
    end_date = '2022-02-20'
    data_path = r'D:\BaiduNetdiskDownload\tick2'
    save_base_path = r'D:\BaiduNetdiskDownload\tick3'
    all_symbols_merge_path = r'D:\BaiduNetdiskDownload\tick4'
    symbols = None  # None -> universe is loaded from ./stocks.csv inside main_run
    # main_run(select_type, start_date, end_date, data_path, sys_freq, save_base_path, symbols)

    # Stage 2: merge each day's per-symbol CSVs into one file per day.
    _start_date = '2022-02-12'
    _end_date = '2022-02-20'
    merge_all_symbols_to_one_day_csv(_start_date, _end_date, save_base_path, all_symbols_merge_path)



































