# -*- coding: utf-8 -*-
#
# Copyright 2017 zhangxp, st
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import json
import os
import pickle
import re
from itertools import chain

import h5py
import numpy as np
import pandas as pd

from rqalpha.utils.concurrent import (ProgressedProcessPoolExecutor,
                                      ProgressedTask)
from rqalpha.utils.datetime_func import (convert_date_to_date_int, convert_dt_to_int,
                                         convert_date_to_int, convert_int_to_date)
from rqalpha.utils.config import rqalpha_path

from rqalpha_mod_stu_api.data_source import St_Database_KDataSource
from rqalpha_update_bundle.wind.dump_wind_data import load_inst_info, load_inst_kline


# Earliest / latest trade-date bounds used by all bundle updaters below
# (dates encoded as YYYYMMDD integers).
START_DATE = 20050104
END_DATE = 29991231

# Resolve the on-disk bundle directory under the rqalpha home path and open
# the module-wide data source (named `rqdatac` to mirror RiceQuant's API).
rqalpha_bundle_path = rqalpha_path + '/bundle'
default_bundle_path = os.path.abspath(os.path.expanduser(rqalpha_bundle_path))
rqdatac = St_Database_KDataSource(default_bundle_path, data_source='ifind')


# def gen_instruments(d):
#     stocks = sorted(list(rqdatac.all_instruments().order_book_id))
#     instruments = [i.__dict__ for i in rqdatac.instruments(stocks)]
#     with open(os.path.join(d, 'instruments.pk'), 'wb') as out:
#         pickle.dump(instruments, out, protocol=2)

def upd_instruments(d):
    """Incrementally update the pickled instrument list under bundle dir *d*.

    Two maintenance passes over ``instruments.pk``:
    1. stocks previously stored with the placeholder listed date
       ``2999-12-31`` (not yet listed at dump time) get their real listed
       date and are flagged ``Active``;
    2. futures contracts listed since the first of the current month that
       are missing from the pickle are inserted, keeping the list sorted
       by ``order_book_id``.
    The file is rewritten only when a pass actually changed something.
    """
    with open(os.path.join(d, 'instruments.pk'), 'rb') as f:
        instruments = pickle.load(f)
    instruments_all = rqdatac.all_instruments()
    # --- pass 1: refresh newly listed stocks ---
    new_stock_list = [i['order_book_id'] for i in instruments
                      if i['listed_date'] == '2999-12-31'
                      and i['de_listed_date'] == '0000-00-00'
                      and i['type'] == 'CS']
    instruments_new = instruments_all[instruments_all['order_book_id'].isin(new_stock_list)]
    if len(instruments_new) > 0:
        # O(1) lookup instead of a linear scan per row; also avoids silently
        # updating the last list element if no match were found.
        index_by_id = {inst['order_book_id']: n for n, inst in enumerate(instruments)}
        for idx, row in instruments_new.iterrows():
            i = index_by_id.get(row['order_book_id'])
            if i is None:  # defensive: isin() above should guarantee a match
                continue
            print(instruments[i])
            instruments[i].update({'status': 'Active', 'listed_date': row['listed_date']})
            print(instruments[i])
        with open(os.path.join(d, 'instruments.pk'), 'wb') as out:
            pickle.dump(instruments, out, protocol=2)
    # --- pass 2: insert futures listed since the first of this month ---
    start_date = datetime.date.today().replace(day=1)
    instruments_add = instruments_all[~instruments_all['order_book_id'].isin([i['order_book_id'] for i in instruments])]
    instruments_add = instruments_add[(instruments_add['listed_date'] >= str(start_date))
                                      & (instruments_add['type'] == 'Future')]
    if len(instruments_add) > 0:
        for idx, row in instruments_add.iterrows():
            # Find the insertion point that keeps order_book_id sorted.
            # BUG FIX: the original ranged over the full list and indexed
            # instruments[i + 1], raising IndexError whenever the new id
            # sorts after every existing one; now fall back to appending.
            for i in range(0, len(instruments) - 1):
                if instruments[i]['order_book_id'] < row['order_book_id'] < instruments[i + 1]['order_book_id']:
                    break
            else:
                i = len(instruments) - 1
            print(instruments[i])
            # Use the neighbouring instrument as a template for the new one.
            inst = instruments[i].copy()
            if inst['type'] in ('Future',):
                inst.update({'symbol': row['symbol'], 'order_book_id': row['order_book_id'],
                             'listed_date': row['listed_date'], 'de_listed_date': row['de_listed_date']})
                inst.update({'maturity_date': row['de_listed_date'], 'trading_code': row['abbrev_symbol']})
                print(inst)
            instruments.insert(i + 1, inst)
        with open(os.path.join(d, 'instruments.pk'), 'wb') as out:
            pickle.dump(instruments, out, protocol=2)


# def gen_yield_curve(d):
#     yield_curve = rqdatac.get_yield_curve(start_date=START_DATE, end_date=datetime.date.today())
#     yield_curve.index = [convert_date_to_date_int(d) for d in yield_curve.index]
#     yield_curve.index.name = 'date'
#     with h5py.File(os.path.join(d, 'yield_curve.h5'), 'w') as f:
#         f.create_dataset('data', data=yield_curve.to_records())

def upd_yield_curve(d):
    """Append yield-curve rows for trading days since the last stored date.

    Each new trading day simply repeats the last known curve values with
    only the date column changed, then the whole ``data`` dataset is
    rewritten. Fixes over the original: removes a dead ``tolist()`` call
    whose result was discarded, stops shadowing the ``d`` parameter inside
    the loop, and derives the row width from the record instead of the
    hard-coded 22.
    """
    with h5py.File(os.path.join(d, 'yield_curve.h5'), 'a') as f:
        data = f['data']
        last_data = data[-1]
        # Column 0 of each record is the YYYYMMDD date int.
        next_day = rqdatac.get_next_trading_date(last_data[0])
        days = rqdatac.get_trading_dates(start_date=next_day, end_date=datetime.date.today())
        for day in days:
            # Repeat the last row with the date replaced by `day`.
            row = [convert_date_to_date_int(day)] + [last_data[i + 1] for i in range(len(last_data) - 1)]
            nd = np.asarray(row).reshape(1, len(row))
            data = np.array(
                [tuple(i) for i in chain(data[:], nd)],
                dtype=data.dtype
            )
        # h5py cannot resize this dataset in place; recreate it.
        del f['data']
        f.create_dataset('data', data=data)

# def gen_trading_dates(d):
#     dates = rqdatac.get_trading_dates(start_date=START_DATE, end_date='2999-01-01')
#     dates = np.array([convert_date_to_date_int(d) for d in dates])
#     np.save(os.path.join(d, 'trading_dates.npy'), dates, allow_pickle=False)

def upd_trading_dates(d):
    """Refresh trading_dates.npy when the calendar grew at either end."""
    path = os.path.join(d, 'trading_dates.npy')
    cached = np.load(path, allow_pickle=False)
    fresh = rqdatac.get_trading_dates(start_date=START_DATE, end_date='2999-01-01')
    fresh = np.array([convert_date_to_date_int(day) for day in fresh])
    extends_future = fresh.max() > cached.max()
    extends_past = fresh.min() < cached.min()
    if extends_future or extends_past:
        np.save(path, fresh, allow_pickle=False)

# def gen_st_days(d):
#     from rqdatac.client import get_client
#     stocks = rqdatac.all_instruments('CS').order_book_id.tolist()
#     st_days = get_client().execute('get_st_days', stocks, START_DATE,
#                                    convert_date_to_date_int(datetime.date.today()))
#     with h5py.File(os.path.join(d, 'st_stock_days.h5'), 'w') as h5:
#         for order_book_id, days in st_days.items():
#             h5[order_book_id] = days

def upd_st_days(d):
    """Create or incrementally update st_stock_days.h5 under bundle dir *d*.

    Per order_book_id the file stores the trading days (YYYYMMDD ints) on
    which the stock carried ST status. NOTE(review): the update branch
    asserts days[0] >= days[-1], i.e. dates appear to be stored
    newest-first, and new days are prepended accordingly — confirm this
    matches how readers consume the file.
    """
    if not os.path.exists(os.path.join(d, 'st_stock_days.h5')):
        # Initial build: full ST history for all currently listed stocks.
        stocks = rqdatac.all_instruments('CS').order_book_id.tolist()
        today = datetime.date.today()
        df_days = rqdatac.get_st_days(stocks, START_DATE, today)
        if df_days is None:
            return
        with h5py.File(os.path.join(d, 'st_stock_days.h5'), 'w') as h5:
            for order_book_id, st_days in df_days.groupby('security'):
                days = st_days[st_days['is_st'] == 1]
                if len(days) > 0:
                    days_int = [convert_date_to_date_int(d) for d in days['date'].tolist()]
                    data = np.array(
                        days_int, dtype=np.int32
                    )
                else:
                    # Keep an empty dataset so the stock is known next run.
                    data = np.array([], dtype=np.int32)
                h5[order_book_id] = data
    else:
        # Incremental update: only stocks already in the file, only today.
        with h5py.File(os.path.join(d, 'st_stock_days.h5'), 'a') as h5:
            stocks = [k for k in h5.keys()]
            today = datetime.date.today()
            st_days = rqdatac.get_st_days(stocks, today, today)
            if st_days is None:
                return
            st_days = st_days[st_days['is_st'] == 1]
            for idx,row in st_days.iterrows():
                days = h5[row['security']]
                if len(days) == 0:
                    # Empty datasets are never extended here.
                    continue
                int_day = convert_date_to_date_int(today)
                assert(days[0] >= days[-1])
                if int_day > days[0]:
                    prev_day = rqdatac.get_previous_trading_date(today)
                    last_day = convert_int_to_date(days[0]).date()
                    if prev_day > last_day:  # several days behind: backfill the gap
                        next_day = rqdatac.get_next_trading_date(last_day)
                        df_days = rqdatac.get_st_days([row['security']], next_day, today)
                        df_days = df_days[df_days['is_st'] == 1]
                        days_int = [convert_date_to_date_int(d) for d in df_days['date'].tolist()]
                    else:
                        days_int = [int_day]
                    # Prepend new days (newest-first storage order).
                    data = np.array(
                        [i for i in chain(days_int, days[:])],
                        dtype=days.dtype
                    )
                    del h5[row['security']]
                    h5.create_dataset(row['security'], data=data)


# def gen_suspended_days(d):
#     from rqdatac.client import get_client
#     stocks = rqdatac.all_instruments('CS').order_book_id.tolist()
#     suspended_days = get_client().execute('get_suspended_days', stocks, START_DATE,
#                                           convert_date_to_date_int(datetime.date.today()))
#     with h5py.File(os.path.join(d, 'suspended_days.h5'), 'w') as h5:
#         for order_book_id, days in suspended_days.items():
#             h5[order_book_id] = days


def upd_suspended_days(d):
    """Create or incrementally update suspended_days.h5 under bundle dir *d*.

    Per order_book_id the file stores the days (YYYYMMDD ints) on which
    trading was suspended. Unlike st_stock_days, records here are stored
    oldest-first (assert days[-1] >= days[0]) and new days are appended.
    """
    if not os.path.exists(os.path.join(d, 'suspended_days.h5')):
        # Initial build over the full history of all listed stocks.
        stocks = rqdatac.all_instruments('CS').order_book_id.tolist()
        today = datetime.date.today()
        df_days = rqdatac.get_suspended_days(stocks, START_DATE, today)
        if df_days is None:
            return
        with h5py.File(os.path.join(d, 'suspended_days.h5'), 'w') as h5:
            for order_book_id, susp_days in df_days.groupby('security'):
                # 'paused' is float-like; any positive value means suspended.
                days = susp_days[susp_days['paused'] > 0.00001]
                if len(days) > 0:
                    # NOTE(review): 'gen_time' values are converted directly
                    # here, while the update branch below calls d.date()
                    # first — confirm both paths receive the same type.
                    days_int = [convert_date_to_date_int(d) for d in days['gen_time'].tolist()]
                    data = np.array(
                        days_int, dtype=np.int32
                    )
                else:
                    # Keep an empty dataset so the stock is known next run.
                    data = np.array([], dtype=np.int32)
                h5[order_book_id] = data
    else:
        # Incremental update: only stocks already in the file, only today.
        with h5py.File(os.path.join(d, 'suspended_days.h5'), 'a') as h5:
            stocks = [k for k in h5.keys()]
            today = datetime.date.today()
            # today = rqdatac.get_previous_trading_date(today)
            susp_days = rqdatac.get_suspended_days(stocks, today, today)
            if susp_days is None:
                return
            susp_days = susp_days[susp_days['paused'] > 0.00001]
            for idx,row in susp_days.iterrows():
                days = h5[row['security']]
                if len(days) == 0:
                    # Empty datasets are never extended here.
                    continue
                int_day = convert_date_to_date_int(today)
                assert(days[-1] >= days[0])
                if int_day > days[-1]:
                    prev_day = rqdatac.get_previous_trading_date(today)
                    last_day = convert_int_to_date(days[-1]).date()
                    if prev_day > last_day:  # several days behind: backfill the gap
                        next_day = rqdatac.get_next_trading_date(last_day)
                        df_days = rqdatac.get_suspended_days([row['security']], next_day, today)
                        df_days = df_days[df_days['paused'] > 0.00001]
                        days_int = [convert_date_to_date_int(d.date()) for d in df_days['gen_time'].tolist()]
                    else:
                        days_int = [int_day]
                    # Append new days (oldest-first storage order).
                    data = np.array(
                        [i for i in chain(days[:], days_int)],
                        dtype=days.dtype
                    )
                    del h5[row['security']]
                    h5.create_dataset(row['security'], data=data)


# def gen_dividends(d):
#     stocks = rqdatac.all_instruments().order_book_id.tolist()
#     dividend = rqdatac.get_dividend(stocks)
#     dividend.reset_index(inplace=True)
#     dividend.rename(columns={'declaration_announcement_date': 'announcement_date'}, inplace=True)
#     for f in ('book_closure_date', 'ex_dividend_date', 'payable_date', 'announcement_date'):
#         dividend[f] = [convert_date_to_date_int(d) for d in dividend[f]]
#     dividend.set_index(['order_book_id', 'book_closure_date'], inplace=True)
#     with h5py.File(os.path.join(d, 'dividends.h5'), 'w') as h5:
#         for order_book_id in dividend.index.levels[0]:
#             h5[order_book_id] = dividend.loc[order_book_id].to_records()


def upd_dividends(d):
    """Create or incrementally update dividends.h5 under bundle dir *d*.

    One record array per order_book_id, indexed by book_closure_date, with
    all date columns converted to YYYYMMDD ints. Rows without a payable
    date (and, on the initial build, without a pre-tax cash dividend) are
    dropped.
    """
    if not os.path.exists(os.path.join(d, 'dividends.h5')):
        # Initial build over the whole history.
        stocks = rqdatac.all_instruments('CS').order_book_id.tolist()
        today = datetime.date.today()
        dividends = rqdatac.get_dividend(stocks, START_DATE, today)
        if dividends is None:
            return
        with h5py.File(os.path.join(d, 'dividends.h5'), 'w') as h5:
            dividends = dividends[~dividends['payable_date'].isna()]
            for book_id,group in dividends.groupby('order_book_id'):
                group.rename(columns={'declaration_announcement_date': 'announcement_date'}, inplace=True)
                for f in ('book_closure_date', 'ex_dividend_date', 'payable_date', 'announcement_date'):
                    group[f] = [convert_date_to_date_int(d) for d in group[f]]
                group.set_index(['order_book_id', 'book_closure_date'], inplace=True)
                group = group[~group['dividend_cash_before_tax'].isna()]
                if len(group) > 0:
                    records = group.loc[book_id].to_records()
                    data = np.array(records)
                else:
                    data = np.array([])
                h5[book_id] = data
    else:
        # Incremental update: fetch the current calendar year and append
        # only records newer than what is already stored.
        with h5py.File(os.path.join(d, 'dividends.h5'), 'a') as h5:
            stocks = [k for k in h5.keys()]
            # Records of the first stored stock double as a dtype template
            # for brand-new order_book_ids below (fragile but deliberate).
            olds = h5[stocks[0]][:]
            today = datetime.date.today()
            start_date = today.replace(month=1, day=1)
            end_date = today.replace(month=12, day=31)
            # Fetch this year's dividend data.
            dividends = rqdatac.get_dividend(stocks, start_date, end_date)
            if dividends is None:
                return
            dividends = dividends[(~dividends['payable_date'].isna()) & (dividends['payable_date'] > start_date)]
            for book_id,group in dividends.groupby('order_book_id'):
                group.rename(columns={'declaration_announcement_date': 'announcement_date'}, inplace=True)
                for f in ('book_closure_date', 'ex_dividend_date', 'payable_date', 'announcement_date'):
                    group[f] = [convert_date_to_date_int(d) for d in group[f]]
                group.set_index(['order_book_id', 'book_closure_date'], inplace=True)
                if book_id in h5:
                    # Field 4 of the last stored record is presumably
                    # payable_date — TODO confirm against the record dtype.
                    last_day = h5[book_id][-1][4]
                    group = group[group['payable_date'] > last_day]
                    olds = h5[book_id][:]
                    # Any genuinely new rows? Rewrite with them appended.
                    if len(group) > 0:
                        records = group.loc[book_id].to_records()
                        data = np.array(
                            [i for i in chain(olds, records)],
                            dtype=olds.dtype
                        )
                        del h5[book_id]
                        h5.create_dataset(book_id, data=data)
                else:
                    # New stock: reuse the dtype of the last examined dataset.
                    records = group.loc[book_id].to_records()
                    data = np.array(
                        [i for i in records],
                        dtype=olds.dtype
                    )
                    h5.create_dataset(book_id, data=data)


def gen_splits(d):
    """Regenerate split_factor.h5: per-stock split factors keyed by ex-date."""
    order_book_ids = rqdatac.all_instruments().order_book_id.tolist()
    frame = rqdatac.get_split(order_book_ids)
    # Factor = shares after the split per share before it.
    frame['split_factor'] = frame['split_coefficient_to'] / frame['split_coefficient_from']
    frame = frame[['split_factor']].reset_index()
    frame = frame.rename(columns={'ex_dividend_date': 'ex_date'})
    frame['ex_date'] = [convert_date_to_int(day) for day in frame['ex_date']]
    frame = frame.set_index(['order_book_id', 'ex_date'])

    with h5py.File(os.path.join(d, 'split_factor.h5'), 'w') as h5:
        for obid in frame.index.levels[0]:
            h5[obid] = frame.loc[obid].to_records()


# def gen_ex_factor(d):
#     stocks = rqdatac.all_instruments().order_book_id.tolist()
#     ex_factor = rqdatac.get_ex_factor(stocks)
#     ex_factor.reset_index(inplace=True)
#     ex_factor['ex_date'] = [convert_date_to_int(d) for d in ex_factor['ex_date']]
#     ex_factor.rename(columns={'ex_date': 'start_date'}, inplace=True)
#     ex_factor.set_index(['order_book_id', 'start_date'], inplace=True)
#     ex_factor = ex_factor[['ex_cum_factor']]

#     dtype = ex_factor.loc[ex_factor.index.levels[0][0]].to_records().dtype
#     initial = np.empty((1,), dtype=dtype)
#     initial['start_date'] = 0
#     initial['ex_cum_factor'] = 1.0

#     with h5py.File(os.path.join(d, 'ex_cum_factor.h5'), 'w') as h5:
#         for order_book_id in ex_factor.index.levels[0]:
#             h5[order_book_id] = np.concatenate([initial, ex_factor.loc[order_book_id].to_records()])


def upd_ex_factor(d):
    """Create or incrementally extend ex_cum_factor.h5 under bundle dir *d*.

    First run (file missing): one dataset per stock with (ex_date int,
    ex_cum_factor) records. Later runs: each stored stock is extended from
    its last ex-date; freshly fetched cumulative factors are re-based onto
    the stored series via the ratio of consecutive factors times the last
    stored factor.
    """
    if not os.path.exists(os.path.join(d, 'ex_cum_factor.h5')):
        stocks = rqdatac.all_instruments('CS').order_book_id.tolist()
        with h5py.File(os.path.join(d, 'ex_cum_factor.h5'), 'w') as h5:
            for book_id in stocks:
                start_date = convert_int_to_date(START_DATE).date()
                end_date = datetime.date.today()
                ex_factor = rqdatac.get_ex_factor(book_id, start_date, end_date)
                if ex_factor is None:
                    continue
                if len(ex_factor) > 0:
                    if ex_factor['ex_date'].iloc[-1].year < 2010:   # stale data (delisted etc.)
                        continue
                    ex_factor['ex_date'] = [convert_date_to_int(day) for day in ex_factor['ex_date']]
                    ex_factor = ex_factor.dropna().set_index(['ex_date'])
                    ex_factor = ex_factor[['ex_cum_factor']]
                    records = ex_factor.to_records()
                    data = np.array(records)
                    h5.create_dataset(book_id, data=data)
    else:
        with h5py.File(os.path.join(d, 'ex_cum_factor.h5'), 'a') as h5:
            book_ids = [k for k in h5.keys()]
            for book_id in book_ids:
                last_date = h5[book_id][-1][0]
                start_date = convert_int_to_date(last_date).date()
                end_date = datetime.date.today()
                # Skip stocks already refreshed within the last three months.
                # BUG FIX: the original compared months only within the same
                # calendar year, so e.g. Nov -> Jan (2 months apart) was
                # never treated as recent; use a full month delta instead.
                months_apart = (end_date.year - start_date.year) * 12 + (end_date.month - start_date.month)
                if months_apart < 3:
                    continue
                ex_factor = rqdatac.get_ex_factor(book_id, start_date, end_date)
                if ex_factor is None:
                    continue
                if len(ex_factor) > 1:
                    if ex_factor['ex_date'].iloc[-1].year < 2010:   # stale data (delisted etc.)
                        continue
                    last_factor = h5[book_id][-1][1]
                    # Re-base onto the stored series; the first row becomes
                    # NaN via shift(1) and is dropped below.
                    # NOTE(review): for more than two new rows this scales
                    # each one-step ratio by the last stored factor instead
                    # of chaining cumulatively — confirm this is intended.
                    ex_factor['ex_cum_factor'] = ex_factor['ex_cum_factor'] / ex_factor['ex_cum_factor'].shift(1) * last_factor
                    ex_factor['ex_date'] = [convert_date_to_int(day) for day in ex_factor['ex_date']]
                    ex_factor = ex_factor.dropna().set_index(['ex_date'])
                    ex_factor = ex_factor[['ex_cum_factor']]
                    olds = h5[book_id][:]
                    records = ex_factor.to_records()
                    data = np.array(
                        [i for i in chain(olds, records)],
                        dtype=olds.dtype
                    )
                    del h5[book_id]
                    h5.create_dataset(book_id, data=data)


def gen_share_transformation(d):
    """Write share_transformation.json: predecessor id -> successor info."""
    transformations = rqdatac.get_share_transformation()
    transformations.drop_duplicates("predecessor", inplace=True)
    transformations.set_index('predecessor', inplace=True)
    # Dates as plain strings so they serialize cleanly to JSON.
    transformations.effective_date = transformations.effective_date.astype(str)
    transformations.predecessor_delisted_date = transformations.predecessor_delisted_date.astype(str)

    target = os.path.join(d, 'share_transformation.json')
    with open(target, 'w') as out:
        out.write(transformations.to_json(orient='index'))


def init_future_info(d):
    """Build future_info.json from scratch.

    For every futures contract with commission data available, record its
    commission ratios, commission type and tick size. For underlyings
    without per-contract data, fall back to a single symbol-level entry
    taken from the current dominant contract. A handful of delisted
    underlyings get hard-coded entries appended at the end.
    """
    all_futures_info = []
    underlying_symbol_list = []
    fields = ['close_commission_ratio', 'close_commission_today_ratio', 'commission_type', 'open_commission_ratio']

    futures_order_book_id = rqdatac.all_instruments(type='Future')['order_book_id'].unique()
    for future in futures_order_book_id:
        future_dict = {}
        # The underlying symbol is the leading alphabetic part of the id.
        underlying_symbol = re.match(r'^[a-zA-Z]*', future).group()
        commission = rqdatac.futures.get_commission_margin(future)
        if not commission.empty:
            # Per-contract commission data available.
            future_dict['order_book_id'] = future
            commission = commission.iloc[0]
            for p in fields:
                future_dict[p] = commission[p]
            future_dict['tick_size'] = rqdatac.instruments(future).tick_size()
        elif underlying_symbol not in underlying_symbol_list:
            # Symbol-level fallback — skip the delisted symbols that are
            # covered by the hard-coded list below.
            if underlying_symbol in {'S', 'TC', 'ER', 'WS', 'WT', 'RO', 'ME'}:
                continue
            underlying_symbol_list.append(underlying_symbol)
            future_dict['underlying_symbol'] = underlying_symbol
            try:
                dominant = rqdatac.futures.get_dominant(underlying_symbol).iloc[-1]
            except AttributeError:
                # FIXME: why get_dominant return None???
                continue
            commission = rqdatac.futures.get_commission_margin(dominant).iloc[0]
            for p in fields:
                future_dict[p] = commission[p]
            future_dict['tick_size'] = rqdatac.instruments(dominant).tick_size()
        else:
            # Symbol already covered by an earlier contract.
            continue
        all_futures_info.append(future_dict)

    # Static entries for delisted underlyings with no queryable data.
    hard_info = [{'underlying_symbol': 'TC',
                  'close_commission_ratio': 4.0,
                  'close_commission_today_ratio': 0.0,
                  'commission_type': "by_volume",
                  'open_commission_ratio': 4.0,
                  'tick_size': 0.2},
                 {'underlying_symbol': 'ER',
                  'close_commission_ratio': 2.5,
                  'close_commission_today_ratio': 2.5,
                  'commission_type': "by_volume",
                  'open_commission_ratio': 2.5,
                  'tick_size': 1.0},
                 {'underlying_symbol': 'WS',
                  'close_commission_ratio': 2.5,
                  'close_commission_today_ratio': 0.0,
                  'commission_type': "by_volume",
                  'open_commission_ratio': 2.5,
                  'tick_size': 1.0},
                 {'underlying_symbol': 'RO',
                  'close_commission_ratio': 2.5,
                  'close_commission_today_ratio': 0.0,
                  'commission_type': "by_volume",
                  'open_commission_ratio': 2.5,
                  'tick_size': 2.0},
                 {'underlying_symbol': 'ME',
                  'close_commission_ratio': 1.4,
                  'close_commission_today_ratio': 0.0,
                  'commission_type': "by_volume",
                  'open_commission_ratio': 1.4,
                  'tick_size': 1.0}]

    all_futures_info += hard_info

    with open(os.path.join(d, 'future_info.json'), 'w') as f:
        json.dump(all_futures_info, f, separators=(',', ':'), indent=2)


def gen_future_info(d):
    """Extend future_info.json with contracts not yet recorded.

    Falls back to init_future_info() when the file does not exist yet.
    Known order_book_ids and symbol-level entries are skipped; the lookup
    logic mirrors init_future_info().
    """
    future_info_file = os.path.join(d, 'future_info.json')
    if not os.path.exists(future_info_file):
        init_future_info(d)
        return

    with open(future_info_file, 'r') as f:
        all_futures_info = json.load(f)

    future_list = []
    symbol_list = []
    param = ['close_commission_ratio', 'close_commission_today_ratio', 'commission_type', 'open_commission_ratio']

    # Split existing entries into per-contract ids and symbol-level entries.
    for i in all_futures_info:
        if i.get('order_book_id'):
            future_list.append(i.get('order_book_id'))
        else:
            symbol_list.append(i.get('underlying_symbol'))

    futures_order_book_id = rqdatac.all_instruments(type='Future')['order_book_id'].unique()
    for future in futures_order_book_id:
        # The underlying symbol is the leading alphabetic part of the id.
        underlying_symbol = re.match(r'^[a-zA-Z]*', future).group()
        if future in future_list:
            continue
        future_dict = {}
        commission = rqdatac.futures.get_commission_margin(future)
        if not commission.empty:
            # Per-contract commission data available.
            future_list.append(future)
            future_dict['order_book_id'] = future
            commission = commission.iloc[0]
            for p in param:
                future_dict[p] = commission[p]
            future_dict['tick_size'] = rqdatac.instruments(future).tick_size()
        elif underlying_symbol in symbol_list \
                or underlying_symbol in {'S', 'TC', 'ER', 'WS', 'WT', 'RO', 'ME'}:
            # Symbol already covered, or one of the hard-coded delisted ones.
            continue
        else:
            # Symbol-level fallback via the dominant contract.
            symbol_list.append(underlying_symbol)
            future_dict['underlying_symbol'] = underlying_symbol
            try:
                dominant = rqdatac.futures.get_dominant(underlying_symbol).iloc[-1]
            except AttributeError:
                # FIXME: why get_dominant return None???
                continue
            commission = rqdatac.futures.get_commission_margin(dominant).iloc[0]

            for p in param:
                future_dict[p] = commission[p]
            future_dict['tick_size'] = rqdatac.instruments(dominant).tick_size()
        all_futures_info.append(future_dict)

    with open(os.path.join(d, 'future_info.json'), 'w') as f:
        json.dump(all_futures_info, f, separators=(',', ':'), indent=2)


def upd_future_info(d):
    """Clone commission info for newly listed futures from a recent sibling.

    For each future contract not yet present in future_info.json (and whose
    4-digit yymm suffix is this month or later), copy the stored entry of
    the same underlying's contract from last month, the month before, or
    January of this year — whichever exists first — under the new
    order_book_id. The file is rewritten only when something was added.
    """
    future_info_file = os.path.join(d, 'future_info.json')
    if not os.path.exists(future_info_file):
        return

    with open(future_info_file, 'r') as f:
        all_futures_info = json.load(f)

    known_contracts = []
    known_symbols = []
    # order_book_id -> its stored info dict, for O(1) template lookup.
    info_by_contract = {}
    for entry in all_futures_info:
        if entry.get('order_book_id'):
            known_contracts.append(entry.get('order_book_id'))
            info_by_contract[entry.get('order_book_id')] = entry
        else:
            known_symbols.append(entry.get('underlying_symbol'))

    today = datetime.date.today()
    today_ym = today.strftime("%y%m")                                                  # this month
    first_ym = today.strftime("%y01")                                                  # January
    last_ym = (today.replace(day=1) - datetime.timedelta(days=15)).strftime("%y%m")    # last month
    last2_ym = (today.replace(day=1) - datetime.timedelta(days=45)).strftime("%y%m")   # month before last

    new_futures_info = []
    futures_order_book_id = rqdatac.all_instruments(type='Future')['order_book_id'].unique()
    for future in futures_order_book_id:
        underlying_symbol = re.match(r'^[a-zA-Z]*', future).group()
        if future in known_contracts:
            continue
        suffix = future[len(underlying_symbol):]
        if len(suffix) != 4 or suffix < today_ym:
            continue
        # First template wins: last month, then two months back, then January.
        for candidate_ym in (last_ym, last2_ym, first_ym):
            template = info_by_contract.get(underlying_symbol + candidate_ym)
            if template is not None:
                cloned = template.copy()
                cloned['order_book_id'] = future
                new_futures_info.append(cloned)
                break

    if len(new_futures_info) > 0:
        all_futures_info = all_futures_info + new_futures_info
        with open(os.path.join(d, 'future_info.json'), 'w') as f:
            json.dump(all_futures_info, f, separators=(',', ':'), indent=2)


class GenerateFileTask(ProgressedTask):
    """Adapt a plain callable into a ProgressedTask reporting one
    100-unit step once the callable completes."""

    def __init__(self, func):
        self._task_func = func
        self._progress_total = 100

    @property
    def total_steps(self):
        # type: () -> int
        return self._progress_total

    def __call__(self, *args, **kwargs):
        # Run the wrapped function, then emit the single progress step.
        self._task_func(*args, **kwargs)
        yield self._progress_total

# Instrument type codes as passed to rqdatac.all_instruments().
STOCK_TYPE = 'CS'
INDEX_TYPE = 'INDX'
FUTURES_TYPE = 'Future'
FUND_TYPE = 'FUND'

# Bar fields stored per instrument type in the h5 bundles.
STOCK_FIELDS = ['open', 'close', 'high', 'low', 'limit_up', 'limit_down', 'volume', 'total_turnover']
INDEX_FIELDS = ['open', 'close', 'high', 'low', 'volume', 'total_turnover']
FUTURES_FIELDS = STOCK_FIELDS + ['settlement', 'prev_settlement', 'open_interest']
# Minute-bar futures fields (no limit prices or settlement columns).
FUTURES_FIELDS_MIN = INDEX_FIELDS + ['open_interest']
FUND_FIELDS = STOCK_FIELDS


class DayBarTask(ProgressedTask):
    """Base task for dumping bar data; collects the instruments to process.

    With create=True every instrument of *inst_type* is selected; otherwise
    only those still listed or delisted within roughly the last month.
    """

    def __init__(self, inst_type, create=False):
        self._inst_type = inst_type
        if create:
            self._order_book_ids = rqdatac.all_instruments(inst_type).order_book_id.tolist()
        else:
            cutoff = (datetime.date.today() - datetime.timedelta(days=31)).strftime("%Y-%m-%d")
            selected = []
            for idx, row in rqdatac.all_instruments(inst_type).iterrows():
                # '0000-00-00' marks a still-listed instrument.
                if row.de_listed_date == '0000-00-00' or row.de_listed_date >= cutoff:
                    selected.append(row.order_book_id)
            self._order_book_ids = selected
        self._h5_fields = None

    @property
    def total_steps(self):
        # type: () -> int
        return len(self._order_book_ids)

    def __call__(self, path, fields, **kwargs):
        raise NotImplementedError


class GenerateDayBarTask(DayBarTask):
    """Full-regeneration task: writes a fresh bar file (mode 'w') for all
    instruments of the given type."""

    def __init__(self, inst_type):
        super(GenerateDayBarTask, self).__init__(inst_type, True)

    def __call__(self, path, fields, frequency='1d', **kwargs):
        # Generator: dumps bars chunk by chunk, yielding a count per chunk
        # so the surrounding progress reporter can advance.
        with h5py.File(path, 'w') as h5:
            if frequency == '1d':
                i, step = 0, 300
            else:
                # Minute data is heavier; use smaller request batches.
                i, step = 0, 100
            while True:
                order_book_ids = self._order_book_ids[i:i + step]
                # Skip ids already written (pre-existing datasets).
                # NOTE(review): the yield below reports the filtered count
                # while total_steps counts all ids, so reported progress can
                # undershoot — confirm whether the slice size was intended.
                order_book_ids = [x for x in order_book_ids if x not in h5]
                df = rqdatac.get_price(order_book_ids, START_DATE, datetime.date.today(), frequency,
                                       adjust_type='none', fields=fields, expect_df=True)
                if not (df is None or df.empty):
                    df.reset_index(inplace=True)
                    if 'index' in df:
                        del df['index']
                    # Encode datetimes as ints: YYYYMMDD for daily bars,
                    # full datetime ints for intraday frequencies.
                    if frequency == '1d':
                        df['datetime'] = [convert_date_to_int(d) for d in df['datetime']]
                    else:
                        df['datetime'] = [convert_dt_to_int(d) for d in df['datetime']]
                    df.set_index(['order_book_id', 'datetime'], inplace=True)
                    df.sort_index(inplace=True)
                    for order_book_id in df.index.levels[0]:
                        h5.create_dataset(order_book_id, data=df.loc[order_book_id].to_records(), **kwargs)
                i += step
                yield len(order_book_ids)
                if i >= len(self._order_book_ids):
                    break


class UpdateDayBarTask(DayBarTask):
    """Incrementally update an existing bar HDF5 file.

    Falls back to a full rebuild (GenerateDayBarTask) when the file is
    missing, unreadable, or its datasets carry a different field set than
    requested.
    """

    def h5_has_valid_fields(self, h5, wanted_fields):
        """Return True when the first dataset in *h5* has exactly
        *wanted_fields* plus the mandatory 'datetime' column.

        Side effect: caches the file's actual field set on self._h5_fields.
        Returns False for a file with no datasets.
        """
        obid_gen = (k for k in h5.keys())
        wanted_fields = set(wanted_fields)
        wanted_fields.add('datetime')
        try:
            # Only the first dataset is inspected; all datasets are assumed
            # to share one schema.
            h5_fields = set(h5[next(obid_gen)].dtype.fields.keys())
            self._h5_fields = h5_fields
            # Compatibility with MiQuant (米宽) files lacking open_interest.
            if 'open_interest' not in h5_fields and 'open_interest' in wanted_fields:
                wanted_fields.remove('open_interest')
        except StopIteration:
            pass
        else:
            return h5_fields == wanted_fields
        return False

    def __call__(self, path, fields, frequency='1d', **kwargs):
        """Update bars for every order_book_id, yielding 1 per instrument.

        Appends rows newer than the last stored bar; rewrites each dataset
        wholesale since HDF5 datasets here are fixed-size records.
        """
        need_recreate_h5 = False
        try:
            with h5py.File(path, 'r') as h5:
                need_recreate_h5 = not self.h5_has_valid_fields(h5, fields)
        except (OSError, RuntimeError):
            # Missing or corrupt file: rebuild from scratch.
            need_recreate_h5 = True
        if need_recreate_h5:
            yield from GenerateDayBarTask(self._inst_type)(path, fields, frequency, **kwargs)
        else:
            try:
                h5 = h5py.File(path, 'a')
            except OSError:
                raise OSError("File {} update failed, if it is using, please update later, "
                              "or you can delete then update again".format(path))
            try:
                for order_book_id in self._order_book_ids:
                    if order_book_id in h5:
                        # For existing datasets, only refresh recent futures
                        # contracts; skip those expired over a month ago.
                        if self._inst_type == FUTURES_TYPE:
                            last_ym = (datetime.date.today() - datetime.timedelta(days=31)).strftime("%y%m")
                            rm = re.match("[A-Z]{1,2}", order_book_id)
                            book_ym = order_book_id.replace(rm.group(), '')
                            if len(book_ym) == 4 and book_ym < last_ym:
                                yield 1
                                continue
                        try:
                            # Last stored bar date; // 1000000 strips the
                            # HHMMSS part of the packed datetime int.
                            last_date = int(h5[order_book_id]['datetime'][-1] // 1000000)
                        except OSError:
                            raise OSError("File {} update failed, if it is using, please update later, "
                                          "or you can delete then update again".format(path))
                        except ValueError:
                            # Empty dataset: drop it and refetch from scratch.
                            h5.pop(order_book_id)
                            start_date = START_DATE
                        else:
                            start_date = rqdatac.get_next_trading_date(last_date)
                    else:
                        start_date = START_DATE
                    df = rqdatac.get_price(order_book_id, start_date, END_DATE, frequency,
                                        adjust_type='none', fields=fields, expect_df=True)
                    if not (df is None or df.empty):
                        df = df[fields]  # Future order_book_id like SC888 will auto add 'dominant_id'
                        df = df.loc[order_book_id]
                        df.reset_index(inplace=True)
                        if frequency=='1d':
                            df['datetime'] = [convert_date_to_int(d) for d in df['datetime']]
                        else:
                            df['datetime'] = [convert_dt_to_int(d) for d in df['datetime']]
                        df.set_index('datetime', inplace=True)
                        if order_book_id in h5:
                            if self._inst_type == FUTURES_TYPE:
                                # Compatibility with MiQuant (米宽) files
                                # lacking open_interest.
                                if 'open_interest' not in h5[order_book_id].dtype.names and 'open_interest' in df:
                                    del df['open_interest']
                                if frequency=='1d':
                                    # Keep the first new bar's prev_settlement
                                    # consistent with the stored settlement.
                                    if df['prev_settlement'].iloc[0] != h5[order_book_id]['settlement'][-1]:
                                        df['prev_settlement'].iloc[0] = h5[order_book_id]['settlement'][-1]
                                if h5[order_book_id][-1] != df.to_records()[-1]:
                                    data = np.array(
                                        [tuple(i) for i in chain(h5[order_book_id][:], df.to_records())],
                                        dtype=h5[order_book_id].dtype
                                    )
                                    del h5[order_book_id]
                                    h5.create_dataset(order_book_id, data=data, **kwargs)
                            else:
                                # Append new rows by rewriting the dataset.
                                data = np.array(
                                    [tuple(i) for i in chain(h5[order_book_id][:], df.to_records())],
                                    dtype=h5[order_book_id].dtype
                                )
                                del h5[order_book_id]
                                h5.create_dataset(order_book_id, data=data, **kwargs)
                        else:
                            if self._inst_type == FUTURES_TYPE:
                                # Compatibility with MiQuant (米宽) files
                                # lacking open_interest.
                                if 'open_interest' not in self._h5_fields and 'open_interest' in df:
                                    del df['open_interest']
                                if frequency=='1d':
                                    # NOTE(review): x != x is only True for NaN,
                                    # so this backfills a NaN prev_settlement
                                    # with the first open — confirm intent.
                                    if df['prev_settlement'].iloc[0] != df['prev_settlement'].iloc[0]:
                                        df['prev_settlement'].iloc[0] = df['open'].iloc[0]
                                h5.create_dataset(order_book_id, data=df.to_records(), **kwargs)
                            else:
                                h5.create_dataset(order_book_id, data=df.to_records(), **kwargs)
                    yield 1
            finally:
                h5.close()


def init_rqdatac_with_warnings_catch():
    """Initialise the data source inside worker processes, silencing the
    "rqdatac is already inited. Settings will be changed" warning.

    NOTE(review): here ``rqdatac`` is a St_Database_KDataSource instance
    rather than the rqdatac module — confirm it exposes ``init()``.
    """
    import warnings
    recorder = warnings.catch_warnings(record=True)
    with recorder:
        rqdatac.init()


def update_bundle(path, create=False, enable_compression=False, concurrency=1):
    """Create or incrementally update the bar bundle under *path*.

    :param path: bundle directory holding the .h5 files and metadata files.
    :param create: when True rebuild bar files from scratch
        (GenerateDayBarTask); otherwise update incrementally.
    :param enable_compression: when True create HDF5 datasets compressed
        with gzip level 9 (h5py accepts a bare int as a gzip level).
    :param concurrency: number of worker processes in the task pool.
    """
    _DayBarTask = GenerateDayBarTask if create else UpdateDayBarTask

    kwargs = {}
    if enable_compression:
        kwargs['compression'] = 9

    day_bar_args = (
        ("stocks.h5", STOCK_TYPE, STOCK_FIELDS),
        ("indexes.h5", INDEX_TYPE, INDEX_FIELDS),
        ("futures.h5", FUTURES_TYPE, FUTURES_FIELDS),
        ("funds.h5", FUND_TYPE, FUND_FIELDS),
    )

    min_bar_args = (
        ("futures_1m.h5", FUTURES_TYPE, FUTURES_FIELDS_MIN, '1m'),
        ("futures_5m.h5", FUTURES_TYPE, FUTURES_FIELDS_MIN, '5m'),
        ("futures_10m.h5", FUTURES_TYPE, FUTURES_FIELDS_MIN, '10m'),
        ("futures_15m.h5", FUTURES_TYPE, FUTURES_FIELDS_MIN, '15m'),
        ("futures_30m.h5", FUTURES_TYPE, FUTURES_FIELDS_MIN, '30m'),
        ("futures_60m.h5", FUTURES_TYPE, FUTURES_FIELDS_MIN, '60m')
    )
    # Minute data lives in a sibling directory so it is not wiped when the
    # day-bar bundle is regenerated.
    min_path = path.replace('bundle', 'bundle_min') if path.endswith('bundle') else path
    # makedirs(exist_ok=True) replaces the racy exists()/mkdir() pair and
    # also creates any missing parent directories.
    os.makedirs(min_path, exist_ok=True)

    rqdatac.reset()

    upd_file_funcs = (
        upd_instruments, upd_trading_dates, upd_dividends, upd_ex_factor, upd_st_days,
        upd_suspended_days, upd_yield_curve, upd_future_info
    )

    with ProgressedProcessPoolExecutor(
            max_workers=concurrency, initializer=init_rqdatac_with_warnings_catch
    ) as executor:
        # On Windows, child processes must run rqdatac.init; other OSes only
        # need rqdatac.reset (init is a superset of reset).
        for func in upd_file_funcs:
            executor.submit(GenerateFileTask(func), path)
        for file, inst_type, field in day_bar_args:
            executor.submit(_DayBarTask(inst_type), os.path.join(path, file), field, **kwargs)
        for file, inst_type, field, frequency in min_bar_args:
            executor.submit(_DayBarTask(inst_type), os.path.join(min_path, file), field, frequency, **kwargs)


def insert_instruments(d):
    """Insert newly listed index/future instruments into instruments.pk.

    Instruments from load_inst_info() that are not yet present are inserted
    next to an existing instrument of the same type, using that instrument
    as a field template.

    :param d: bundle directory containing instruments.pk.
    """
    with open(os.path.join(d, 'instruments.pk'), 'rb') as f:
        instruments = pickle.load(f)
    instruments_all = load_inst_info()
    # Codes already present for the handled types (index & future).
    known_codes = [i['order_book_id'] for i in instruments if i['type'] in ('INDX', 'Future')]
    instruments_new = instruments_all[~instruments_all['code'].isin(known_codes)]
    if len(instruments_new) > 0:
        for idx, row in instruments_new.iterrows():
            # Find the insertion anchor: the last instrument of the same
            # type, or the first same-type instrument whose id sorts at or
            # after the new code.
            anchor = 0
            for i, existing in enumerate(instruments):
                if existing['type'] == row.type:
                    anchor = i
                    if existing['order_book_id'] >= row['code']:
                        break
            # Use the anchor instrument as a template for unknown fields.
            inst = instruments[anchor].copy()
            if inst['type'] in ('INDX', 'Future'):
                inst.update({'order_book_id': row['code'], 'abbrev_symbol': row['name'],
                             'symbol': row['display_name'], 'trading_code': row['code'],
                             'listed_date': str(row['listed_date']),
                             'de_listed_date': str(row['de_listed_date'])})
                instruments.insert(anchor + 1, inst)
        with open(os.path.join(d, 'instruments.pk'), 'wb') as out:
            pickle.dump(instruments, out, protocol=2)


def insert_future_info(d):
    """Add commission entries for new futures contracts to future_info.json.

    For every contract known to load_inst_info but missing from the file,
    copy the commission info of the closest existing contract of the same
    underlying (preferring last month, then the month before, then January
    of this year) and record it under the new order_book_id.

    :param d: bundle directory containing future_info.json.
    """
    future_info_file = os.path.join(d, 'future_info.json')
    if not os.path.exists(future_info_file):
        return

    with open(future_info_file, 'r') as f:
        all_futures_info = json.load(f)

    future_list = []
    symbol_list = []
    # order_book_id -> info dict, for O(1) template lookup.
    future_dict = {}
    for info in all_futures_info:
        if info.get('order_book_id'):
            future_list.append(info.get('order_book_id'))
            future_dict[info.get('order_book_id')] = info
        else:
            symbol_list.append(info.get('underlying_symbol'))

    today = datetime.date.today()
    first_ym = today.strftime("%y01")   # January of this year
    last_ym = (today.replace(day=1) - datetime.timedelta(days=15)).strftime("%y%m")   # last month
    last2_ym = (today.replace(day=1) - datetime.timedelta(days=45)).strftime("%y%m")  # month before last

    new_futures_info = []
    futures_order_book_id = load_inst_info(FUTURES_TYPE)['code'].tolist()
    for future in futures_order_book_id:
        if future in future_list:
            continue
        underlying_symbol = re.match(r'^[a-zA-Z]*', future).group()
        # Try template contracts in preference order; first hit wins.
        for ym in (last_ym, last2_ym, first_ym):
            template = future_dict.get(underlying_symbol + ym)
            if template is not None:
                commission = template.copy()
                commission['order_book_id'] = future
                new_futures_info.append(commission)
                break

    if len(new_futures_info) > 0:
        all_futures_info = all_futures_info + new_futures_info
        with open(os.path.join(d, 'future_info.json'), 'w') as f:
            json.dump(all_futures_info, f, separators=(',', ':'), indent=2)


class InsertDayBarTask(ProgressedTask):
    """Append kline data loaded from wind dump files into an HDF5 bar file.

    Unlike UpdateDayBarTask this reads bars via load_inst_kline rather than
    rqdatac.get_price.
    """

    def __init__(self, inst_type):
        self._inst_type = inst_type
        # All instrument codes of this type from the wind instrument table.
        self._order_book_ids = load_inst_info(inst_type)['code'].tolist()

    @property
    def total_steps(self):
        # type: () -> int
        """One progress step per instrument."""
        return len(self._order_book_ids)

    @staticmethod
    def _normalize(df):
        """Select the bundle columns and coerce None cells to NaN so the
        records convert cleanly to a numeric numpy dtype."""
        df = df[INDEX_FIELDS]
        for c in df:
            df[c] = df[c].apply(lambda x: np.NaN if x is None else x)
        return df

    def __call__(self, path, fields, frequency='1d', **kwargs):
        """Append new bars for every instrument to *path*, yielding 1 per id.

        :param fields: kept for interface parity with DayBarTask.
            NOTE(review): column selection uses INDEX_FIELDS regardless of
            *fields* — presumably because the wind kline dump only carries
            OHLCV/turnover columns; confirm this is intended for futures.
        """
        try:
            h5 = h5py.File(path, 'a')
        except OSError:
            raise OSError("File {} update failed, if it is using, please update later, "
                            "or you can delete then update again".format(path))
        try:
            for order_book_id in self._order_book_ids:
                if order_book_id in h5:
                    try:
                        # Date of the last stored bar (strip HHMMSS part).
                        last_date = int(h5[order_book_id]['datetime'][-1] // 1000000)
                    except OSError:
                        raise OSError("File {} update failed, if it is using, please update later, "
                                        "or you can delete then update again".format(path))
                    except ValueError:
                        # Empty dataset: drop it and reload from scratch.
                        h5.pop(order_book_id)
                        start_date = START_DATE
                    else:
                        start_date = rqdatac.get_next_trading_date(last_date)
                else:
                    start_date = START_DATE
                # NOTE(review): start_date is computed (with the side effect
                # of dropping corrupt datasets) but not consumed below; new
                # rows are filtered by datetime instead — verify.
                df = load_inst_kline(self._inst_type, order_book_id)
                if not (df is None or df.empty):
                    # Map wind column names onto the bundle schema.
                    df = df.rename(columns={'OPEN':'open', 'CLOSE':'close', 'HIGH':'high', 'LOW':'low', 'VOLUME':'volume', 'AMT':'total_turnover', 'DateTime':'datetime'})
                    if frequency=='1d':
                        df['datetime'] = [convert_date_to_int(d) for d in df['datetime']]
                    else:
                        df['datetime'] = [convert_dt_to_int(d) for d in df['datetime']]
                    df.set_index('datetime', inplace=True)
                    if order_book_id in h5:
                        # Keep only bars newer than the last stored one.
                        df = df[df.index > h5[order_book_id]['datetime'][-1]]
                        if not df.empty:
                            df = self._normalize(df)
                            data = np.array(
                                [tuple(i) for i in chain(h5[order_book_id][:], df.to_records())],
                                dtype=h5[order_book_id].dtype
                            )
                            del h5[order_book_id]
                            h5.create_dataset(order_book_id, data=data, **kwargs)
                    else:
                        h5.create_dataset(order_book_id, data=self._normalize(df).to_records(), **kwargs)
                yield 1
        finally:
            h5.close()


def insert_bundle(path, create=False, enable_compression=False, concurrency=1):
    """Insert wind-sourced index/future data into the bundle at *path*.

    :param path: bundle directory containing the .h5 files and metadata.
    :param create: accepted for interface parity; insertion always appends.
    :param enable_compression: when True, write gzip-level-9 HDF5 datasets.
    :param concurrency: number of worker processes.
    """
    dataset_kwargs = {}
    if enable_compression:
        dataset_kwargs['compression'] = 9

    # (filename, instrument type, field list) for each day-bar file to touch.
    day_bar_args = (
        ("indexes.h5", INDEX_TYPE, INDEX_FIELDS),
        ("futures.h5", FUTURES_TYPE, FUTURES_FIELDS),
    )

    rqdatac.reset()

    file_tasks = (
        insert_instruments, insert_future_info
    )

    with ProgressedProcessPoolExecutor(
            max_workers=concurrency, initializer=init_rqdatac_with_warnings_catch
    ) as executor:
        # On Windows, child processes must run rqdatac.init; other OSes only
        # need rqdatac.reset (init is a superset of reset).
        for task_func in file_tasks:
            executor.submit(GenerateFileTask(task_func), path)
        for filename, inst_type, field_list in day_bar_args:
            executor.submit(InsertDayBarTask(inst_type), os.path.join(path, filename), field_list, **dataset_kwargs)


if __name__ == '__main__':
    # Default bundle location under the rqalpha home directory.
    bundle_dir = os.path.abspath(os.path.expanduser(rqalpha_path + '/bundle'))
    update_bundle(bundle_dir, concurrency=3)
