import requests
from datetime import datetime, date
import json
import execjs
import importlib.resources as resources
from bs4 import BeautifulSoup, element
import pandas as pd
import pathlib
import pytz
from tqdm import tqdm
import time
from easydict import EasyDict as edict

# Browser-like User-Agent so jisilu.cn serves the normal site instead of
# rejecting the client as a bot.
HEADERS = {
    'User-Agent':
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 Safari/537.36'
}
# All request timestamps are taken in Hong Kong time (jisilu covers
# mainland-China market data).
TZ = pytz.timezone('Asia/Hong_Kong')


def auth(username, password, js=None):
    """Log in to jisilu.cn and return an authenticated session.

    The site expects credentials AES-encrypted client-side by its own
    ``jslencode`` JavaScript routine, so that script is executed here
    through execjs before posting the login form.

    Args:
        username: jisilu account name (plain text).
        password: jisilu password (plain text).
        js: optional JavaScript source providing ``jslencode``; when
            None, the bundled ``jisilu.js`` package resource is used.

    Returns:
        A ``requests.Session`` carrying the login cookies.
    """
    aes_key = '397151C04723421F'
    source = resources.read_text(__package__, 'jisilu.js') if js is None else js
    ctx = execjs.compile(source)
    form = {
        '_post_type': 'ajax',
        'aes': 1,
        'net_auto_login': '1',
        'password': ctx.call('jslencode', password, aes_key),
        'return_url': 'https://www.jisilu.cn/',
        'user_name': ctx.call('jslencode', username, aes_key),
    }
    session = requests.Session()
    session.post('https://www.jisilu.cn/account/ajax/login_process/',
                 data=form,
                 headers=HEADERS)
    return session


def get_conbond_detail(code: str, client: requests.Session,
                       cache_dir: pathlib.Path):
    """Fetch one bond's price-history JSON and its detail-page HTML.

    Responses are cached on disk under *cache_dir* as ``<code>.json`` and
    ``<code>.html``; cached copies are returned without hitting the
    network.

    Args:
        code: jisilu bond id (e.g. '113001').
        client: authenticated session from :func:`auth` (unused on a
            cache hit).
        cache_dir: directory holding the per-bond cache files.

    Returns:
        Tuple ``(json_data, html)`` — the decoded history payload and the
        raw detail page.
    """
    conbond_json = cache_dir.joinpath('%s.json' % code)
    conbond_html = cache_dir.joinpath('%s.html' % code)
    if conbond_json.exists() and conbond_html.exists():
        # Cache hit: no network request, so no throttling needed (the
        # original slept 1s here, pointlessly delaying cached reads).
        json_data = json.loads(conbond_json.read_text(encoding='utf-8'))
        html = conbond_html.read_text(encoding='utf-8')
    else:
        # Millisecond timestamp of today's midnight (HK time) for the
        # site's ___jsl cache-buster parameter.
        ts = int(
            datetime.fromordinal(
                datetime.now(TZ).date().toordinal()).timestamp() * 1000)
        url = 'https://www.jisilu.cn/data/cbnew/detail_hist/%s?___jsl=LST___t=%s' % (
            code, ts)
        response = client.post(url, headers=HEADERS)
        json_data = json.loads(response.content.decode('utf-8'))
        # Use a context manager so the cache file is closed deterministically
        # (the original leaked the handle returned by Path.open()).
        with conbond_json.open(mode='w', encoding='utf-8') as fp:
            json.dump(json_data, fp, ensure_ascii=False, indent=4)

        url = 'https://www.jisilu.cn/data/convert_bond_detail/%s' % code
        response = client.post(url, headers=HEADERS)
        html = response.content.decode('utf-8')
        conbond_html.write_text(html, encoding='utf-8')
        # Rate-limit only actual requests to the server.
        time.sleep(1)

    return json_data, html


def parse_conbond_info(html: str):
    """Print the forced-redemption table rows from a bond detail page.

    Scans the ``#redeem_tc`` section of a jisilu detail page and prints
    cells 1-3 of each data row (presumably date/price/note columns —
    TODO confirm against a live page).

    Args:
        html: raw HTML of a ``convert_bond_detail`` page.
    """
    soup = BeautifulSoup(html, 'html.parser')
    for redeem_info in soup.select('#redeem_tc'):
        for table in redeem_info.find_all('table'):
            # Iterate the rows of *this* table. The original iterated
            # redeem_info.find_all('tr') inside the table loop, which
            # reprocessed every row once per table.
            for row in table.find_all('tr'):
                cells = row.find_all('td')
                # Header rows use <th> (no <td>); also guard against
                # short rows so cells[3] cannot raise IndexError.
                if len(cells) < 4:
                    continue
                print('%s, %s, %s' %
                      (cells[1].string, cells[2].string, cells[3].string))


def get_all_conbonds(client: requests.Session, cache_dir: pathlib.Path):
    """Return a DataFrame with every convertible bond id (trading + delisted).

    The two raw API responses are cached as JSON in *cache_dir*; the
    combined id list is also written there as ``all_conbonds.csv``.

    Args:
        client: authenticated session from :func:`auth` (unused on a
            cache hit).
        cache_dir: directory for the cache files.

    Returns:
        ``pandas.DataFrame`` with a single ``order_book_id`` column.
    """
    trading_json = cache_dir.joinpath('trading_conbonds.json')
    delisted_json = cache_dir.joinpath('delisted_conbonds.json')
    if trading_json.exists() and delisted_json.exists():
        trading = json.loads(trading_json.read_text(encoding='utf-8'))
        delisted = json.loads(delisted_json.read_text(encoding='utf-8'))
    else:
        # Millisecond timestamp of today's midnight (HK time) for the
        # site's ___jsl cache-buster parameter.
        ts = int(
            datetime.fromordinal(
                datetime.now(TZ).date().toordinal()).timestamp() * 1000)
        url = 'https://www.jisilu.cn/data/cbnew/cb_list_new/?___jsl=LST___t=%s' % ts
        response = client.post(url, headers=HEADERS)
        trading = json.loads(response.content.decode('utf-8'))
        # Context managers close the cache files deterministically (the
        # original leaked the handles returned by Path.open()).
        with trading_json.open(mode='w', encoding='utf-8') as fp:
            json.dump(trading, fp, ensure_ascii=False, indent=4)

        url = 'https://www.jisilu.cn/data/cbnew/delisted/?___jsl=LST___t=%s' % ts
        response = client.post(url, headers=HEADERS)
        delisted = json.loads(response.content.decode('utf-8'))
        with delisted_json.open(mode='w', encoding='utf-8') as fp:
            json.dump(delisted, fp, ensure_ascii=False, indent=4)
    # Concatenate trading and delisted ids into one flat column.
    ids = [row['id'] for row in trading['rows']]
    ids.extend(row['id'] for row in delisted['rows'])
    df = pd.DataFrame({'order_book_id': ids})
    df.to_csv(cache_dir.joinpath('all_conbonds.csv'), index=False)
    return df


def refresh_conbond(client: requests.Session, cache_dir: pathlib.Path):
    """Crawl (or re-read from cache) the detail page of every bond.

    Args:
        client: authenticated session from :func:`auth`.
        cache_dir: directory used for all on-disk caching.
    """
    universe = get_all_conbonds(client, cache_dir)
    codes = universe.order_book_id.tolist()
    for bond_code in tqdm(codes):
        _, page_html = get_conbond_detail(bond_code, client, cache_dir)
        parse_conbond_info(page_html)


def refresh_misc(cache_dir: pathlib.Path):
    """Fetch (and cache) the convertible-bond index page, then print the
    first row of its index table.

    The page builds its table with JavaScript, so a cache miss requires a
    headless render via requests_html.

    Args:
        cache_dir: directory for the cached ``cb_index.html``.
    """
    fcb_index = cache_dir.joinpath('cb_index.html')
    if fcb_index.exists():
        html = fcb_index.read_text()
    else:
        # Imported lazily: requests_html (and its headless-Chromium
        # dependency) is only needed on a cache miss.
        from requests_html import HTMLSession
        session = HTMLSession()
        url = 'https://www.jisilu.cn/data/cbnew/cb_index/'
        # NOTE(review): the empty 'http' entry disables any system HTTP
        # proxy for this request — confirm this is intentional.
        proxies = {
            'http': '',
        }
        response = session.request('GET', url, proxies=proxies)
        response.html.render()  # execute the page's JavaScript
        html = response.html.html
        # write_text closes the file (the original leaked the handle
        # returned by Path.open()).
        fcb_index.write_text(html)
    soup = BeautifulSoup(html, 'html.parser')
    tbody = soup.find('tbody', attrs={'id': 'table_cb_index_body'})
    if isinstance(tbody, element.Tag):
        for row in tbody.find_all('tr'):
            cols = [cell.text.strip() for cell in row.find_all('td')]
            print(cols)
            break  # only the first row is printed, matching prior behavior


def main():
    """Fetch the live convertible-bond list, filter it, and print the 20
    bonds with the lowest double-low score.

    Credentials are read from ``~/.cache/quant/.auth.json``; the
    ``jslencode`` JavaScript is loaded from ``jisilu.js`` next to this
    file.
    """
    from io import StringIO

    # Millisecond cache-buster timestamp; use the module-wide HK-time
    # clock (the original mixed in a naive date.today() here).
    url = 'https://www.jisilu.cn/data/cbnew/cb_list/?___jsl=LST___t=%s' % int(
        datetime.fromordinal(datetime.now(TZ).date().toordinal()).timestamp() *
        1000)
    payload = {'listed': 'Y'}
    auth_file = pathlib.Path.home().joinpath('.cache/quant/.auth.json')
    # read_text closes the files (the original leaked the Path.open() handles).
    cred = edict(json.loads(auth_file.read_text()))
    p = pathlib.Path(__file__).parent
    jsl = auth(cred.jisilu.username,
               cred.jisilu.password,
               js=p.joinpath('jisilu.js').read_text())
    response = jsl.post(url, data=payload, headers=HEADERS)
    jisilu_data = json.loads(response.content.decode('utf-8'))
    jd = {row['id']: row['cell'] for row in jisilu_data['rows']}
    # pd.io.json.read_json is a removed private alias; use the public
    # pd.read_json (string input must be wrapped in a file-like object).
    df = pd.read_json(StringIO(json.dumps(jd)), orient='index')
    # Keep convertible bonds only; drop exchangeable bonds (过滤可交换债).
    df = df[df.btype == 'C']
    # Drop bonds only institutions may buy (过滤仅机构可买).
    df = df[df.qflag != 'Q']
    # Drop bonds with an announced forced redemption (过滤已公布强赎).
    df = df[pd.isnull(df.force_redeem)]
    df['bond_id'] = df['bond_id'].astype(str)
    df = df.rename(
        columns={
            'bond_id': 'order_book_id',
            'bond_nm': 'short_name',
            'stock_id': 'company_code',
            'price': 'bond_price',
            'sprice': 'stock_price',
            'premium_rt': 'convert_premium_rate',
            'dblow': 'double_low'
        }).reset_index()
    # 'sz'-prefixed underlyings trade in Shenzhen (.XSHE), the rest in
    # Shanghai (.XSHG).
    df['order_book_id'] = df.apply(
        lambda row: row.order_book_id + '.XSHE'
        if row.company_code.startswith('sz') else row.order_book_id + '.XSHG',
        axis=1)
    df = df[[
        'order_book_id', 'short_name', 'company_code', 'bond_price',
        'stock_price', 'convert_premium_rate', 'double_low'
    ]]
    print(df.nsmallest(20, 'double_low'))


# Run the double-low screen when executed as a script.
if __name__ == '__main__':
    main()
