import json
import logging
import pathlib
from typing import Optional, Tuple

import pandas as pd
from easydict import EasyDict
from gm.api import history, ADJUST_NONE, SEC_TYPE_BOND_CONVERTIBLE, get_instruments, get_trading_dates, \
    set_serv_addr, set_token

from . import jisilu


def refresh_conbond(
        start_date: str,
        end_date: str,
        freq: str,
        cache_dir: Optional[pathlib.Path] = None,
        jsl_dir: Optional[pathlib.Path] = None
) -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Build the convertible-bond instrument table and price bars.

    Instrument metadata comes from the juejin (gm.api) service and is
    enriched with jisilu data; bars are bond prices joined with the
    underlying stock prices and per-day jisilu indicators.

    Args:
        start_date: first day of bar history requested from juejin.
        end_date: last day of bar history requested from juejin.
        freq: '1d' selects daily bars; any other value selects 60s bars.
        cache_dir: when given, CSV caches are read/written here, juejin
            credentials from ~/.cache/quant/.auth.json are applied, and
            the benchmark indices are refreshed as well.
        jsl_dir: when given, per-trading-day jisilu indicator CSVs are
            loaded from this directory instead of the indicators returned
            by jisilu.refresh_now().

    Returns:
        Tuple of (all_instruments, bars): the instrument table indexed by
        order_book_id and the bar table indexed by
        (datetime, order_book_id).
    """
    if cache_dir is not None:
        auth_file = pathlib.Path('~/.cache/quant/.auth.json').expanduser()
        # read_text() closes the file; the previous open().read() leaked
        # the handle.
        cred = EasyDict(json.loads(auth_file.read_text()))
        # NOTE(review): the serv addr appears to live in the 'username'
        # field of the credential file -- confirm that is intentional.
        logging.info('Serv addr: %s' % cred.juejin.username)
        set_serv_addr(cred.juejin.username)
        set_token(cred.juejin.password)
        refresh_index('2017-01-01', end_date, cache_dir)

    # Anything other than daily data is fetched at one-minute resolution.
    if freq == '1d':
        frequency = '1d'
    else:
        frequency = '60s'

    if cache_dir is not None and cache_dir.joinpath(
            'all_instruments.csv').exists():
        logging.info('Read juejin ins from cache')
        all_instruments = pd.read_csv(
            cache_dir.joinpath('all_instruments.csv'),
            index_col=['order_book_id'])
    else:
        logging.info('Read instruments from juejin')
        all_instruments = get_instruments(sec_types=SEC_TYPE_BOND_CONVERTIBLE,
                                          df=True)
        # Map juejin column names onto the schema used downstream.
        all_instruments.rename(
            columns={
                'symbol': 'order_book_id',
                'underlying_symbol': 'stock_code',
                'sec_name': 'symbol',
            },
            inplace=True)
        # Constant columns expected by the downstream instrument schema.
        all_instruments['bond_type'] = 'cb'
        all_instruments['trading_hours'] = '09:31-11:30,13:01-15:00'
        all_instruments['board_type'] = 'MainBoard'
        all_instruments['type'] = 'CS'
        all_instruments['market_tplus'] = 0
        all_instruments['round_lot'] = 10
        all_instruments['account_type'] = 'STOCK'
        all_instruments.set_index('order_book_id', inplace=True)
        if cache_dir is not None:
            all_instruments.to_csv(cache_dir.joinpath('all_instruments.csv'))

    # jisilu keys bonds by the numeric code (order_book_id without the
    # 5-char 'SHSE.'/'SZSE.' exchange prefix); build that key to join on.
    all_instruments = all_instruments.reset_index()
    all_instruments['cb_id'] = all_instruments.order_book_id.apply(
        lambda obi: int(obi[5:]))
    all_instruments.set_index('cb_id', inplace=True)
    # dtype of index is int64
    ins, indicators = jisilu.refresh_now(jsl_dir)
    all_instruments = all_instruments.join(ins).reset_index(
        drop=True).set_index('order_book_id')

    if cache_dir is not None and cache_dir.joinpath(
            'conbonds_%s.csv' % freq).exists():
        logging.info('Read bars from cache')
        bars = pd.read_csv(cache_dir.joinpath('conbonds_%s.csv' % freq),
                           index_col=['datetime', 'order_book_id'],
                           parse_dates=['datetime'])
    else:
        logging.info('Read conbonds from juejin')
        conbonds = ','.join(all_instruments.index.tolist())
        bond_price = history(symbol=conbonds,
                             frequency=frequency,
                             start_time=start_date,
                             end_time=end_date,
                             adjust=ADJUST_NONE,
                             df=True)
        bond_price.rename(columns={
            'eob': 'datetime',
            'symbol': 'order_book_id'
        },
                          inplace=True)
        assert not bond_price.empty, '%s-%s' % (start_date, end_date)
        bond_price['datetime'] = pd.to_datetime(bond_price.datetime)
        bond_price.set_index('order_book_id', inplace=True)
        if cache_dir is not None:
            bond_price.to_csv(cache_dir.joinpath('bond_price_%s.csv' % freq))

        logging.info('Read stock price from juejin')
        stocks = ','.join(all_instruments.stock_code.tolist())
        stock_price = history(symbol=stocks,
                              frequency=frequency,
                              start_time=start_date,
                              end_time=end_date,
                              adjust=ADJUST_NONE,
                              df=True)
        stock_price.rename(columns={
            'eob': 'datetime',
        }, inplace=True)
        stock_price['datetime'] = pd.to_datetime(stock_price.datetime)
        if cache_dir is not None:
            stock_price.to_csv(cache_dir.joinpath('stock_price_%s.csv' % freq),
                               index=False)

        # Attach each bond's underlying stock code, then pull in that
        # stock's open/close at the matching timestamp.
        df = bond_price.join(all_instruments[['stock_code']])
        stock_price.rename(columns={
            'symbol': 'stock_code',
            'open': 'stock_open',
            'close': 'stock_close',
        },
                           inplace=True)

        bars = df.reset_index().merge(stock_price[[
            'stock_code', 'datetime', 'stock_open', 'stock_close'
        ]],
                                      on=['stock_code', 'datetime'],
                                      how='left')
        bars.drop(columns=[
            'stock_code', 'frequency', 'amount', 'pre_close', 'bob', 'position'
        ],
                  inplace=True)

        if jsl_dir:
            # Prefer the per-trading-day indicator files from jisilu when
            # a directory of them is available; every trading day in the
            # range must be present.
            dfs = []
            for dt in get_trading_dates(exchange='SZSE',
                                        start_date=start_date,
                                        end_date=end_date):
                fjsl = jsl_dir.joinpath('%s.csv' % dt)
                assert fjsl.exists(), dt
                dfs.append(pd.read_csv(fjsl, parse_dates=['datetime']))
            indicators = pd.concat(dfs)
            indicators.set_index(['datetime', 'order_book_id'], inplace=True)
        # Indicators are keyed by numeric bond code; align bars to the same
        # (datetime, cb_id) key before joining.
        indicators = indicators.reset_index().rename(columns={
            'order_book_id': 'cb_id'
        }).set_index(['datetime', 'cb_id'])
        bars['cb_id'] = bars.order_book_id.apply(lambda obi: int(obi[5:]))
        bars.set_index(['datetime', 'cb_id'], inplace=True)
        bars = bars.join(indicators[[
            'conversion_value', 'yield_to_maturity', 'remaining_size',
            'double_low_factor', 'convert_premium_rate', 'call_qualified_days',
            'put_qualified_days'
        ]])
        bars = bars.reset_index().drop(columns=['cb_id']).sort_values(
            ['datetime', 'order_book_id']).set_index(
                ['datetime', 'order_book_id'])
        if cache_dir is not None:
            bars.to_csv(cache_dir.joinpath('conbonds_%s.csv' % freq))

    return all_instruments, bars


def refresh_index(start_date: str, end_date: str, cache_dir: pathlib.Path):
    """Ensure daily bars and instrument metadata for the two benchmark
    index symbols (SHSE.000832, SHSE.000300) exist as CSVs in *cache_dir*.

    Each file is fetched from juejin only when missing; existing cache
    files are left untouched.
    """
    bars_file = cache_dir.joinpath('index_1d.csv')
    if not bars_file.exists():
        logging.info('Read index data from juejin')
        bars = history(symbol='SHSE.000832,SHSE.000300',
                       frequency='1d',
                       start_time=start_date,
                       end_time=end_date,
                       adjust=ADJUST_NONE,
                       df=True)
        bars = bars.rename(columns={'eob': 'datetime',
                                    'symbol': 'order_book_id'})
        bars['datetime'] = pd.to_datetime(bars.datetime)
        bars = bars.set_index(['datetime', 'order_book_id'])
        # Persist only the OHLCV columns.
        bars[['open', 'high', 'low', 'close', 'volume']].to_csv(bars_file)

    ins_file = cache_dir.joinpath('index_instruments.csv')
    if not ins_file.exists():
        logging.info('Read index instruments from juejin')
        meta = get_instruments(symbols='SHSE.000832,SHSE.000300', df=True)
        meta = meta.rename(columns={'symbol': 'order_book_id',
                                    'sec_name': 'symbol',
                                    'delisted_date': 'de_listed_date'})
        # Normalize the date columns to plain 'YYYY-MM-DD' strings.
        meta['listed_date'] = meta.listed_date.dt.date.astype(str)
        meta['de_listed_date'] = meta.de_listed_date.dt.date.astype(str)
        # Constant columns expected by the downstream instrument schema.
        for column, value in [('maturity_date', '0000-00-00'),
                              ('trading_hours', '09:31-11:30,13:01-15:00'),
                              ('board_type', 'MainBoard'),
                              ('type', 'INDX'),
                              ('market_tplus', 0),
                              ('round_lot', 10),
                              ('account_type', 'STOCK')]:
            meta[column] = value
        meta = meta.set_index('order_book_id')
        meta.to_csv(ins_file)