#!/usr/bin/env python
# coding=utf-8
import importlib.resources as resources
import json
import logging
import pathlib
import re
import time as ptime
from datetime import datetime, time
from quant.trader import trader
import pytz
import execjs
import pandas as pd
import requests
from bs4 import BeautifulSoup
from tqdm import tqdm
from easydict import EasyDict

# Browser-like User-Agent so jisilu.cn serves the same payloads it serves a
# real browser instead of rejecting the requests as bot traffic.
HEADERS = {
    'User-Agent':
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 '
    'Safari/537.36 '
}

# Empty proxy mapping: explicitly disables any environment-configured
# HTTP(S) proxies for the requests calls below.
PROXIES = {
    'http': '',
    'https': '',
}


def auth():
    """Log in to jisilu.cn and return an authenticated requests.Session.

    Credentials are read from ``~/.cache/quant/.auth.json`` (expects a
    ``jisilu`` entry with ``username`` and ``password``).  They are
    AES-encrypted client-side via the bundled ``jisilu.js`` script,
    mirroring what the site's own login form does in the browser.

    Returns:
        requests.Session carrying the login cookies.
    """
    auth_file = pathlib.Path('~/.cache/quant/.auth.json').expanduser()
    # Use a context manager so the credential file handle is closed promptly
    # (the original leaked the open handle).
    with auth_file.open(mode='r') as f:
        cred = EasyDict(json.load(f))
    # Static AES key used by jisilu's login page itself.
    key = '397151C04723421F'
    ctx = execjs.compile(resources.read_text(__package__, 'jisilu.js'))
    s = requests.Session()
    s.post('https://www.jisilu.cn/account/ajax/login_process/',
           data={
               '_post_type': 'ajax',
               'aes': 1,
               'net_auto_login': '1',
               'password': ctx.call('jslencode', cred.jisilu.password, key),
               'return_url': 'https://www.jisilu.cn/',
               'user_name': ctx.call('jslencode', cred.jisilu.username, key),
           },
           proxies=PROXIES,
           headers=HEADERS)
    return s


def get_conbond_detail(code: str, client: requests.Session,
                       cache_dir: pathlib.Path):
    """Fetch the detail history (JSON) and detail page (HTML) of one bond.

    Cached copies live in *cache_dir* as ``<code>.json`` / ``<code>.html``;
    on a cache hit a 1-second pause keeps the overall crawl pace roughly
    uniform with the network path.

    Args:
        code: jisilu bond id.
        client: Authenticated session from :func:`auth`.
        cache_dir: Directory holding the per-bond cache files.

    Returns:
        ``(json_data, html)`` tuple.
    """
    conbond_json = cache_dir.joinpath('%s.json' % code)
    conbond_html = cache_dir.joinpath('%s.html' % code)
    if conbond_json.exists() and conbond_html.exists():
        logging.info('JSL: Read %s from cache' % code)
        # Context managers + explicit utf-8: the original leaked both file
        # handles and relied on the platform locale to decode Chinese text.
        with conbond_json.open(mode='r', encoding='utf-8') as f:
            json_data = json.load(f)
        with conbond_html.open(mode='r', encoding='utf-8') as f:
            html = f.read()
        ptime.sleep(1)
    else:
        logging.info('JSL: Read %s from jsl' % code)
        url = 'https://www.jisilu.cn/data/cbnew/detail_hist/%s' % code
        response = client.post(url, headers=HEADERS)
        json_data = json.loads(response.content.decode('utf-8'))
        # ensure_ascii=False emits raw Chinese characters, so the file must
        # be opened as utf-8 explicitly (would fail on non-utf-8 locales).
        with conbond_json.open(mode='w', encoding='utf-8') as f:
            json.dump(json_data, f, ensure_ascii=False, indent=4)

        url = 'https://www.jisilu.cn/data/convert_bond_detail/%s' % code
        response = client.post(url, headers=HEADERS)
        html = response.content.decode('utf-8')
        with conbond_html.open(mode='w', encoding='utf-8') as f:
            f.write(html)

    return json_data, html


def parse_conbond_info(html: str):
    """Log the redeem-history rows found in a bond's detail page.

    Scans the ``#redeem_tc`` section of the page and logs columns 1-3 of
    every data row of every table inside it.
    """
    soup = BeautifulSoup(html, 'html.parser')
    for redeem_info in soup.select('#redeem_tc'):
        for table in redeem_info.find_all('table'):
            for row in table.find_all('tr'):
                cells = row.find_all('td')
                # Header rows typically use <th>, so find_all('td') returns
                # an empty/short list there; the original cells[1] would
                # raise IndexError on such rows — skip them instead.
                if len(cells) < 4:
                    continue
                # Lazy %-style args: formatting happens only if the record
                # is actually emitted.
                logging.info('JSL: %s, %s, %s', cells[1].string,
                             cells[2].string, cells[3].string)


def get_all_conbonds(client: requests.Session, cache_dir: pathlib.Path):
    """Return a DataFrame with the ids of all trading and delisted conbonds.

    Fetches jisilu's trading and delisted lists (caching the raw JSON under
    *cache_dir*) and concatenates the bond ids into a single
    ``order_book_id`` column.

    Returns:
        pd.DataFrame with one column ``order_book_id``.
    """
    trading_json = cache_dir.joinpath('trading_conbonds.json')
    delisted_json = cache_dir.joinpath('delisted_conbonds.json')
    if trading_json.exists() and delisted_json.exists():
        logging.info('JSL: Read all conbonds from cache')
        # Context managers + explicit utf-8: close the handles and read the
        # payload back with the same encoding it was written in.
        with trading_json.open(mode='r', encoding='utf-8') as f:
            trading = json.load(f)
        with delisted_json.open(mode='r', encoding='utf-8') as f:
            delisted = json.load(f)
    else:
        logging.info('JSL: Read all conbonds from jsl')
        url = 'https://www.jisilu.cn/data/cbnew/cb_list_new/'
        response = client.post(url, headers=HEADERS)
        trading = json.loads(response.content.decode('utf-8'))
        # ensure_ascii=False needs an explicit utf-8 file (the original
        # leaked the handle and used the locale default encoding).
        with trading_json.open(mode='w', encoding='utf-8') as f:
            json.dump(trading, f, ensure_ascii=False, indent=4)

        url = 'https://www.jisilu.cn/data/cbnew/delisted/'
        response = client.post(url, headers=HEADERS)
        delisted = json.loads(response.content.decode('utf-8'))
        with delisted_json.open(mode='w', encoding='utf-8') as f:
            json.dump(delisted, f, ensure_ascii=False, indent=4)
    # Union of trading and delisted ids, trading first (same order as the
    # original append loops).
    ids = [row['id'] for row in trading['rows']]
    ids.extend(row['id'] for row in delisted['rows'])
    return pd.DataFrame({'order_book_id': ids})


def refresh_conbond(client: requests.Session, cache_dir: pathlib.Path):
    """Crawl (or read cached) detail data for every known conbond.

    The JSON payload is fetched purely to populate the cache; only the HTML
    detail page is parsed (and logged) here.
    """
    all_conbonds = get_all_conbonds(client, cache_dir)
    for code in tqdm(all_conbonds.order_book_id.tolist()):
        # The JSON half of the tuple is unused here — fetching it just
        # keeps the on-disk cache complete.
        _, html = get_conbond_detail(code, client, cache_dir)
        parse_conbond_info(html)


def refresh_delisted(client: requests.Session, cache_dir):
    """Fetch (or read cached) delisted convertible bonds.

    Args:
        client: Authenticated session; ``None`` means read from the cache
            instead of hitting jisilu.
        cache_dir: Directory for ``delisted.json`` / ``delisted.csv``; may
            be ``None`` when fetching (nothing is cached then).

    Returns:
        pd.DataFrame indexed by ``order_book_id``.
    """
    fjson = 'delisted.json'
    fcsv = 'delisted.csv'
    if client is None:
        logging.info('JSL: Read delisted from cache')
        delisted = pd.read_csv(cache_dir.joinpath(fcsv),
                               index_col=['order_book_id'])
    else:
        logging.info('JSL: Update delisted')
        url = 'https://www.jisilu.cn/data/cbnew/delisted/'
        response = client.post(url, proxies=PROXIES, headers=HEADERS)
        delisted_json = json.loads(response.content.decode('utf-8'))
        rows = [row['cell'] for row in delisted_json['rows']]
        delisted = pd.DataFrame().from_records(rows)
        # Normalize jisilu's column names to the project schema;
        # errors='raise' surfaces upstream schema changes immediately.
        delisted.rename(columns={
            'bond_id': 'order_book_id',
            'bond_nm': 'symbol',
            'stock_id': 'stock_code',
            'price': 'open',
            'maturity_dt': 'maturity_date',
            'delist_dt': 'de_listed_date',
        },
            inplace=True,
            errors='raise')
        delisted.set_index('order_book_id', inplace=True)
        if cache_dir is not None:
            # Context manager + explicit utf-8: close the handle and keep
            # the ensure_ascii=False dump portable across locales.
            with cache_dir.joinpath(fjson).open(mode='w',
                                                encoding='utf-8') as f:
                json.dump(delisted_json, f, ensure_ascii=False, indent=4)
            delisted.to_csv(cache_dir.joinpath(fcsv))
    return delisted


def refresh_cb_index(jsl, cache_dir):
    """Download the jisilu convertible-bond index history and cache it.

    The series is normalized to the project's bar layout — indexed by
    (datetime, order_book_id) with the fixed id ``CB.IDX`` and with
    open/close/low/high all set from the single ``price`` column — then,
    when *cache_dir* is given, written out as raw JSON plus a CSV.
    """
    logging.info("JSL: Refreshing cb_index...")
    response = jsl.get('https://www.jisilu.cn/webapi/cb/index_history/',
                       headers=HEADERS,
                       proxies=PROXIES)
    index_data = json.loads(response.content.decode('utf-8'))

    bars = pd.DataFrame.from_dict(index_data['data'])
    bars['order_book_id'] = 'CB.IDX'
    hk_tz = pytz.timezone('Asia/Hong_Kong')
    bars['datetime'] = pd.to_datetime(bars.price_dt).dt.tz_localize(hk_tz)
    bars.set_index(['datetime', 'order_book_id'], inplace=True)
    bars.drop(columns=['price_dt'], inplace=True)

    # Everything left in the frame is numeric text — coerce it all to float.
    bars = bars.astype(float)
    # Volume is reported in units of 100k.
    bars['volume'] = bars.volume * 100000
    # The index has only one price per day; replicate it into OHLC columns.
    for bar_col in ('open', 'close', 'low', 'high'):
        bars[bar_col] = bars.price.astype(float)

    if cache_dir is not None:
        json_path = cache_dir.joinpath('cb_index.json')
        with json_path.open(mode='w', encoding='utf-8') as fp:
            json.dump(index_data, fp, ensure_ascii=False, indent=4)
        logging.info('JSL: Updating cb_index...')
        bars.to_csv(cache_dir.joinpath('cb_index.csv'))


def refresh_cb_list_new(jsl, dt, cache_dir, read_cache):
    """Fetch (or read cached) the table of listed convertible bonds for *dt*.

    Filters out institution-only bonds (``qflag2 == 'Q'``), derives
    ``bond_type`` from jisilu's ``btype`` flag ('C' → 'cb', 'E' → 'eb',
    anything else → 'na') and extracts ``de_listed_date`` from the
    free-text ``bond_nm_tip`` field.

    Args:
        jsl: Authenticated session (unused when the cache is hit).
        dt: Trading date, used as the cache-file name.
        cache_dir: Optional cache directory.
        read_cache: If True the cached JSON for *dt* must exist.

    Returns:
        pd.DataFrame indexed by bond id.
    """
    fname = '%s.json' % dt
    if read_cache and cache_dir is not None and cache_dir.joinpath(fname).exists():
        logging.info('JSL: Read json for %s' % dt)
        with cache_dir.joinpath(fname).open(mode='r', encoding='utf-8') as fp:
            jisilu_data = json.load(fp)
    else:
        # A cache miss is only acceptable when the caller asked for live data.
        assert not read_cache
        logging.info('JSL: Read cb_list_new for %s' % dt)
        url = 'https://www.jisilu.cn/data/cbnew/cb_list_new/'
        payload = {'listed': 'Y'}
        response = jsl.post(url,
                            data=payload,
                            proxies=PROXIES,
                            headers=HEADERS)
        jisilu_data = json.loads(response.content.decode('utf-8'))
        if cache_dir is not None:
            # Context manager + explicit utf-8 (the original leaked the
            # handle and used the locale default encoding with
            # ensure_ascii=False output).
            with cache_dir.joinpath(fname).open(mode='w',
                                                encoding='utf-8') as fp:
                json.dump(jisilu_data, fp, ensure_ascii=False, indent=4)
    jd = {row['id']: row['cell'] for row in jisilu_data['rows']}
    df = pd.DataFrame.from_dict(jd, orient='index')
    df = df[df.qflag2 != 'Q']  # drop bonds only institutions may buy
    df['bond_type'] = df.btype.apply(lambda bt: 'cb' if bt == 'C' else
                                     ('eb' if bt == 'E' else 'na'))

    # Compile once instead of per-row inside the apply below.
    last_trade_re = re.compile(r'.*最后交易日：(\d+)年(\d+)月(\d+)日', re.DOTALL)

    def parse_bond_nm_tip(tip: str):
        # Extract the announced last trading day (最后交易日) from the
        # tooltip text; '0000-00-00' is the "not announced" sentinel.
        if '最后交易日' in tip:
            m = last_trade_re.match(tip)
            if m is not None:
                return '{:04d}-{:02d}-{:02d}'.format(
                    *(int(n) for n in m.groups()))
        return '0000-00-00'

    df['de_listed_date'] = df.bond_nm_tip.apply(parse_bond_nm_tip)
    df['price'] = df.price.astype(float)
    return df


def refresh_redeem(jsl, dt, cache_dir, read_cache):
    """Fetch (or read cached) the redeem/call status list for date *dt*.

    Returns:
        pd.DataFrame built from the ``cell`` payload of each row.
    """
    fredeem = 'redeem-%s.json' % dt
    if read_cache and cache_dir is not None and cache_dir.joinpath(fredeem).exists():
        logging.info('JSL: Read redeem json for %s' % dt)
        with cache_dir.joinpath(fredeem).open(mode='r',
                                              encoding='utf-8') as fp:
            redeem_data = json.load(fp)
    else:
        # A cache miss is only acceptable when the caller asked for live data.
        assert not read_cache
        logging.info('JSL: Read redeem from jisilu for %s' % dt)
        url = 'https://www.jisilu.cn/data/cbnew/redeem_list/'
        response = jsl.post(url, proxies=PROXIES, headers=HEADERS)
        redeem_data = json.loads(response.content.decode('utf-8'))
        if cache_dir is not None:
            # Context manager + explicit utf-8 (the original leaked the
            # handle and used the locale default encoding).
            with cache_dir.joinpath(fredeem).open(mode='w',
                                                  encoding='utf-8') as fp:
                json.dump(redeem_data, fp, ensure_ascii=False, indent=4)
    df_redeem = pd.DataFrame.from_records(
        [row['cell'] for row in redeem_data['rows']])
    return df_redeem


def refresh_put(jsl, dt, cache_dir, read_cache):
    """Fetch (or read cached) the put/buy-back (回售) status list for *dt*.

    Returns:
        pd.DataFrame built from the ``cell`` payload of each row.
    """
    fput = 'put-%s.json' % dt
    if read_cache and cache_dir is not None and cache_dir.joinpath(fput).exists():
        logging.info('JSL: Read put json for %s' % dt)
        with cache_dir.joinpath(fput).open(mode='r', encoding='utf-8') as fp:
            put_data = json.load(fp)
    else:
        # A cache miss is only acceptable when the caller asked for live data.
        assert not read_cache
        logging.info('JSL: Read huishou from jisilu for %s' % dt)
        url = 'https://www.jisilu.cn/data/cbnew/huishou_list/'
        response = jsl.post(url, proxies=PROXIES, headers=HEADERS)
        put_data = json.loads(response.content.decode('utf-8'))
        if cache_dir is not None:
            # Context manager + explicit utf-8 (the original leaked the
            # handle and used the locale default encoding).
            with cache_dir.joinpath(fput).open(mode='w',
                                               encoding='utf-8') as fp:
                json.dump(put_data, fp, ensure_ascii=False, indent=4)
    df_put = pd.DataFrame.from_records(
        [row['cell'] for row in put_data['rows']])
    return df_put


def refresh_now(cache_dir: pathlib.Path = None, dt=None, to_juejin=False):
    """Build the conbond instruments table and a one-day snapshot.

    Args:
        cache_dir: Optional directory for cached jisilu payloads and CSVs.
        dt: Trading date to load.  ``None`` means "latest" — the previous
            trading day, or today once the session has opened — and forces
            a live refetch.  An explicit ``dt`` implies reading from cache.
        to_juejin: When True, convert order_book_ids to juejin-style codes
            (``SHSE.xxx`` / ``SZSE.xxx``).

    Returns:
        Tuple ``(ins, df)``: ``ins`` holds listed/de-listed dates for both
        trading and delisted bonds (indexed by order_book_id); ``df`` is
        the snapshot indexed by (datetime, order_book_id).
    """
    now = datetime.now()
    if dt is None:
        read_cache = False
        # Get previous trading date
        dt = trader.Trader.trading_date_in_n(str(now.date()), -1)
        # If today is a trading date and the session has started, use today.
        if now.time() > time(9, 30, 0) and trader.Trader.trading_date_in_n(
                dt, 1) == str(now.date()):
            dt = now.date()
    else:
        read_cache = True
    fcsv = '%s.csv' % dt
    if cache_dir is not None and cache_dir.joinpath(fcsv).exists():
        logging.info('JSL: Read %s from cache' % dt)
        df = pd.read_csv(cache_dir.joinpath(fcsv),
                         parse_dates=['datetime'])
        delisted = refresh_delisted(None, cache_dir)
    else:
        jsl = auth()
        df = refresh_cb_list_new(jsl, dt, cache_dir, read_cache)
        df_redeem = refresh_redeem(jsl, dt, cache_dir, read_cache)
        df = df.merge(df_redeem[['bond_id', 'redeem_real_days']],
                      on=['bond_id'])
        df_put = refresh_put(jsl, dt, cache_dir, read_cache)
        df = df.merge(df_put[['bond_id', 'time']], on=['bond_id'])

        # Normalize jisilu column names to the project schema;
        # errors='raise' surfaces upstream schema changes immediately.
        # Why ytm is '-' sometimes?
        df.rename(columns={
            'bond_id': 'order_book_id',
            'bond_nm': 'symbol',
            'stock_id': 'stock_code',
            'price': 'open',
            'sprice': 'stock_price',
            'premium_rt': 'convert_premium_rate',
            'short_maturity_dt': 'maturity_date',
            'dblow': 'double_low_factor',
            'convert_value': 'conversion_value',
            'ytm_rt': 'yield_to_maturity',
            'redeem_real_days': 'call_qualified_days',
            'time': 'put_qualified_days',
            'curr_iss_amt': 'remaining_size',
        },
            inplace=True,
            errors='raise')
        # short_maturity_dt arrives as 'YY-MM-DD'; restore the century.
        df['maturity_date'] = '20' + df.maturity_date
        # Premium is quoted in percent; convert to a fraction.
        df['convert_premium_rate'] = df.convert_premium_rate / 100
        df['datetime'] = dt
        df['datetime'] = pd.to_datetime(df.datetime).dt.tz_localize(
            pytz.timezone('Asia/Hong_Kong'))

        if read_cache:
            delisted = refresh_delisted(None, cache_dir)
        else:
            delisted = refresh_delisted(jsl, cache_dir)
        # Only persist the snapshot for past days or outside trading hours,
        # so a mid-session snapshot never becomes the day's cache.
        if cache_dir is not None and (dt != now.date() or (now.time() < time(9, 30, 0)
                                                           or now.time() > time(15, 0, 0))):
            logging.info('JSL: Updating jsl files for %s...' % dt)
            if not read_cache:
                refresh_cb_index(jsl, cache_dir)
            df.to_csv(cache_dir.joinpath(fcsv), index=False)

    delisted.rename(columns={'issue_dt': 'listed_date'},
                    inplace=True,
                    errors='raise')
    if to_juejin:
        df['order_book_id'] = df.apply(
            lambda row: 'SHSE.%s' % row.order_book_id
            if row.market_cd.startswith('sh') else 'SZSE.%s' % row.order_book_id,
            axis=1)
        delisted = delisted.reset_index()
        # Delisted rows carry no market code; infer the exchange from the
        # underlying stock code ('6xxxxx' → Shanghai).
        delisted['order_book_id'] = delisted.apply(
            lambda row: 'SHSE.%s' % row.order_book_id
            if str(row.stock_code).startswith('6') else 'SZSE.%s' % row.order_book_id,
            axis=1)
        delisted.set_index('order_book_id', inplace=True)
    df.set_index(['datetime', 'order_book_id'], inplace=True)

    df.rename(columns={'list_dt': 'listed_date'}, inplace=True, errors='raise')
    # curr_iss_amt appears to be quoted in units of 1e8 CNY (亿元); scaled
    # here to absolute units — TODO confirm against jisilu's field docs.
    df['remaining_size'] = df.remaining_size * 100000000
    ins_cols = [
        'maturity_date', 'de_listed_date', 'listed_date'
    ]
    # Instruments table: union of trading and delisted bonds.
    ins = pd.concat([
        df.reset_index().drop(
            columns=['datetime']).set_index('order_book_id')[ins_cols],
        delisted[ins_cols]
    ])

    def resolve_de_listed_date(row):
        # Fall back to the maturity date when the de-listed date is falsy
        # (missing/empty) or the '0000-00-00' sentinel.  The original also
        # had an unreachable `is None` check inside the truthy branch.
        # Consider redeem_dt?
        d = row.de_listed_date
        if d and d != '0000-00-00':
            return d
        return row.maturity_date

    ins['de_listed_date'] = pd.to_datetime(
        ins.apply(resolve_de_listed_date, axis=1))
    ins['listed_date'] = pd.to_datetime(ins.listed_date)
    df['de_listed_date'] = pd.to_datetime(
        df.apply(resolve_de_listed_date, axis=1))
    df['listed_date'] = pd.to_datetime(df.listed_date)
    return ins[['listed_date', 'de_listed_date']], df