import pathlib
import logging
import pandas as pd
import json
from easydict import EasyDict
from gm.api import history, ADJUST_NONE, SEC_TYPE_BOND_CONVERTIBLE, get_instruments, set_serv_addr, set_token, get_instrumentinfos
from typing import Tuple
from sys import platform
from datetime import date, datetime


def refresh_conbond(
        start_date: str, end_date: str, freq: str,
        cache_dir: pathlib.Path) -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Fetch convertible-bond instruments and price bars via the JueJin API.

    Results are cached as CSV files under *cache_dir*; the sentinel file
    ``LATEST`` records the ``end_date`` of the last successful refresh, so a
    repeated call for the same end date is served entirely from cache.

    Args:
        start_date: First day of bar data to fetch (``YYYY-MM-DD``).
        end_date: Last day to fetch; ``None`` means today's date.
        freq: ``'1d'`` for daily bars; any other value maps to 60-second bars.
        cache_dir: Directory holding the CSV cache. Must not be ``None``.

    Returns:
        Tuple ``(all_instruments, bars)``: instrument metadata indexed by
        ``order_book_id``, and bar data indexed by
        ``(datetime, order_book_id)`` with the underlying stock's open/close
        merged in.
    """
    assert cache_dir is not None
    if platform == 'linux':
        # Credentials live outside the repo. NOTE(review): the 'username'
        # field appears to carry the serv addr and 'password' the API token.
        auth_file = pathlib.Path('~/.cache/quant/.auth.json').expanduser()
        # read_text() closes the file; the old open().read() leaked a handle.
        cred = EasyDict(json.loads(auth_file.read_text()))
        logging.info('JUEJIN: Serv addr: %s' % cred.juejin.username)
        set_serv_addr(cred.juejin.username)
        set_token(cred.juejin.password)

    # Anything other than daily is fetched at one-minute (60s) resolution.
    frequency = '1d' if freq == '1d' else '60s'

    # BUGFIX: a cold cache (no LATEST file yet) used to raise
    # FileNotFoundError, and a trailing newline in LATEST silently defeated
    # the `refresh_dt == end_date` comparison below.
    latest_file = cache_dir.joinpath('LATEST')
    refresh_dt = latest_file.read_text().strip() if latest_file.exists() else ''

    if end_date is None:
        end_date = str(datetime.now().date())

    fcsv = cache_dir.joinpath('all_instruments.csv')
    if refresh_dt == end_date and fcsv.exists():
        logging.info('JUEJIN: Read instruments from cache')
        all_instruments = pd.read_csv(fcsv, index_col=['order_book_id'])
    else:
        # Refresh the benchmark index data alongside the instrument list.
        refresh_index('2017-01-01', end_date, cache_dir)
        logging.info('JUEJIN: Read instruments from server')
        all_instruments = get_instruments(sec_types=SEC_TYPE_BOND_CONVERTIBLE,
                                          df=True)
        all_instruments.rename(
            columns={
                'symbol': 'order_book_id',
                'underlying_symbol': 'stock_code',
                'sec_name': 'symbol',
                #  'delisted_date': 'de_listed_date', # Use 'de_listed_date' from jsl
            },
            inplace=True,
            errors='raise')
        # Normalize timestamp columns to plain YYYY-MM-DD strings.
        for col in ('listed_date', 'trade_date', 'conversion_start_date'):
            all_instruments[col] = all_instruments[col].dt.date.astype(str)
        # Constant columns expected by the downstream consumers of this CSV.
        all_instruments['bond_type'] = 'cb'
        all_instruments['trading_hours'] = '09:31-11:30,13:01-15:00'
        all_instruments['board_type'] = 'MainBoard'
        all_instruments['type'] = 'CS'
        all_instruments['market_tplus'] = 0
        all_instruments['round_lot'] = 10
        all_instruments['account_type'] = 'STOCK'
        all_instruments.set_index('order_book_id', inplace=True)
        all_instruments.to_csv(fcsv)

    fconbonds = cache_dir.joinpath('conbonds_%s.csv' % freq)
    if refresh_dt == end_date and fconbonds.exists():
        logging.info('JUEJIN: Read bars from cache')
        bars = pd.read_csv(fconbonds,
                           index_col=['datetime', 'order_book_id'],
                           parse_dates=['datetime'])
    else:
        logging.info('JUEJIN: Read bars from server')
        conbonds = ','.join(all_instruments.index.tolist())
        bond_price = history(symbol=conbonds,
                             frequency=frequency,
                             start_time=start_date,
                             end_time=end_date,
                             adjust=ADJUST_NONE,
                             df=True)
        # BUGFIX: check for an empty response BEFORE rename(errors='raise');
        # an empty frame has no 'eob'/'symbol' columns and used to die with
        # an uninformative KeyError instead of this assertion message.
        assert not bond_price.empty, '%s-%s' % (start_date, end_date)
        bond_price.rename(columns={
            'eob': 'datetime',
            'symbol': 'order_book_id'
        },
            inplace=True,
            errors='raise')
        bond_price['datetime'] = pd.to_datetime(bond_price.datetime)
        bond_price.set_index('order_book_id', inplace=True)
        bond_price.to_csv(cache_dir.joinpath('bond_price_%s.csv' % freq))

        logging.info('JUEJIN: Read stock price from server')
        stocks = ','.join(all_instruments.stock_code.tolist())
        stock_price = history(symbol=stocks,
                              frequency=frequency,
                              start_time=start_date,
                              end_time=end_date,
                              adjust=ADJUST_NONE,
                              df=True)
        stock_price.rename(columns={
            'eob': 'datetime',
        },
            inplace=True,
            errors='raise')
        stock_price['datetime'] = pd.to_datetime(stock_price.datetime)
        stock_price.to_csv(cache_dir.joinpath('stock_price_%s.csv' % freq),
                           index=False)

        # Attach each bond's underlying stock code, then left-merge the
        # underlying's open/close onto the bond bars by (stock, datetime).
        df = bond_price.join(all_instruments[['stock_code']])
        stock_price.rename(columns={
            'symbol': 'stock_code',
            'open': 'stock_open',
            'close': 'stock_close',
        },
            inplace=True,
            errors='raise')

        bars = df.reset_index().merge(stock_price[[
            'stock_code', 'datetime', 'stock_open', 'stock_close'
        ]],
            on=['stock_code', 'datetime'],
            how='left')
        bars.drop(columns=[
            'stock_code', 'frequency', 'amount', 'pre_close', 'bob', 'position'
        ],
            inplace=True)

        bars.set_index(['datetime', 'order_book_id'], inplace=True)
        bars.to_csv(fconbonds)
        # Mark the cache fresh only after every CSV above has been written.
        latest_file.write_text(end_date)

    return all_instruments, bars


def refresh_index(start_date: str, end_date: str, cache_dir: pathlib.Path):
    """Download daily index bars and (once) index instrument metadata.

    Writes ``index_1d.csv`` on every call and ``index_instruments.csv`` only
    when it does not already exist under *cache_dir*.
    """
    bar_path = cache_dir.joinpath('index_1d.csv')
    logging.info('JUEJIN: Read index data from server')
    px = history(symbol='SHSE.000832,SHSE.000300',
                 frequency='1d',
                 start_time=start_date,
                 end_time=end_date,
                 adjust=ADJUST_NONE,
                 df=True)
    px = px.rename(columns={'eob': 'datetime', 'symbol': 'order_book_id'},
                   errors='raise')
    px['datetime'] = pd.to_datetime(px.datetime)
    px = px.set_index(['datetime', 'order_book_id'])
    px[['open', 'high', 'low', 'close', 'volume']].to_csv(bar_path)

    meta_path = cache_dir.joinpath('index_instruments.csv')
    if meta_path.exists():
        # Instrument metadata is static; fetch it only once.
        return
    logging.info('JUEJIN: Read index instruments from server')
    meta = get_instruments(symbols='SHSE.000832,SHSE.000300', df=True)
    meta = meta.rename(columns={
        'symbol': 'order_book_id',
        'sec_name': 'symbol',
        'delisted_date': 'de_listed_date',
    },
        errors='raise')
    # Normalize timestamp columns to plain YYYY-MM-DD strings.
    meta['listed_date'] = meta.listed_date.dt.date.astype(str)
    meta['de_listed_date'] = meta.de_listed_date.dt.date.astype(str)
    # Constant columns expected by the downstream consumers of this CSV.
    fixed_columns = {
        'maturity_date': '0000-00-00',
        'trading_hours': '09:31-11:30,13:01-15:00',
        'board_type': 'MainBoard',
        'type': 'INDX',
        'market_tplus': 0,
        'round_lot': 10,
        'account_type': 'STOCK',
    }
    for name, value in fixed_columns.items():
        meta[name] = value
    meta.set_index('order_book_id').to_csv(meta_path)


def next_year_month(year, month):
    """Return the (year, month) pair one calendar month after the input."""
    rolled_years, zero_based_month = divmod(month, 12)
    return year + rolled_years, zero_based_month + 1


def prev_year_month(year, month):
    """Return the (year, month) pair one calendar month before the input."""
    return (year - 1, 12) if month == 1 else (year, month - 1)


def _days_to_delist(year: int, month: int, dt: date) -> int:
    """Days from *dt* until the CFFEX IC contract for (year, month) delists.

    *year* is the two-digit year embedded in the contract symbol. The result
    is negative when the contract has already been delisted as of *dt*.
    """
    symbol = 'CFFEX.IC{}{:02d}'.format(year, month)
    info = get_instrumentinfos(symbol, df=True)
    return (info.iloc[0].delisted_date.date() - dt).days


def get_dominant(dt: date):
    """Return (year, month, days_to_delist) for the front IC contract at *dt*.

    Starts from the current-month contract; if it has already delisted,
    rolls forward one month. *year* is two-digit, matching the symbol format.
    """
    year = dt.year % 100
    month = dt.month
    days_to_delist = _days_to_delist(year, month, dt)
    if days_to_delist < 0:
        # Current-month contract already expired; roll to the next month.
        year, month = next_year_month(year, month)
        days_to_delist = _days_to_delist(year, month, dt)
    return year, month, days_to_delist
