from crawler.base import BaseCrawler
import requests
from .jsl_login import get_cookie_str  # 导入获取 cookie 的函数
from storage.mysql_storage import DbHandler  # 假设存在一个获取数据库实例的函数
from crawler.jsl.jsl_data_map import alive_bond_map
import pandas as pd
from typing import Optional
from bs4 import BeautifulSoup
from datetime import datetime
import json
# Module-level database handler shared by every spider instance in this file.
# NOTE(review): this is an import-time side effect -- merely importing this
# module constructs the handler and opens a DB session.  Consider lazy
# initialization if import cost / test isolation becomes a problem.
db_instance = DbHandler()
db_instance.create_session()

class Spider_jsl_bond_his_parser(BaseCrawler):
    """Crawler that fetches the trading history of a single convertible
    bond from the JSL (jisilu) site and normalizes it into a DataFrame.
    """

    # Target column order of the DataFrame returned by fetch_one().
    _COLUMNS = ['date', 'volume', 'price',
                'stock_volume', 'sprice',
                'convert_value', 'ytm_rt', 'premium_rate',
                'curr_iss_amt', 'turnover_rt']

    def __init__(self, name, url, data_process_type, params):
        super().__init__(name, url, data_process_type, params)
        # Shared module-level DB handler; used only to look up the login cookie.
        self.db_instance = db_instance

    def fetch_one(self, item):
        """Fetch raw history rows for one bond.

        Args:
            item: mapping that carries the bond code under the key '转债代码'.

        Returns:
            (bond_id, DataFrame) on success, or (None, None) when no valid
            cookie is available or the response contains no rows.

        Raises:
            requests.HTTPError: if the server answers with an error status.
        """
        bond_id = item['转债代码']
        cookie_str = get_cookie_str(self.db_instance)  # login cookie from DB
        if not cookie_str:
            print("Failed to get valid cookie.")
            return None, None
        headers = {
            "Cookie": cookie_str,
            "Content-Type": "application/json; charset=utf-8",
        }

        # URL template expects (bond_id, millisecond timestamp cache-buster).
        url = self.url % (bond_id, int(datetime.now().timestamp() * 1000))
        # timeout keeps the crawler from hanging forever on a dead server;
        # raise_for_status surfaces HTTP errors instead of mis-parsing HTML.
        result = requests.post(url, headers=headers, timeout=30)
        result.raise_for_status()
        result_dict = result.json()
        # Use `row`, not `item`, so the parameter is not shadowed.
        data_list = [row['cell'] for row in result_dict['rows']]

        if not data_list:
            return None, None
        return bond_id, self._build_frame(data_list)

    @staticmethod
    def _build_frame(data_list):
        """Turn the raw row dicts into a cleaned, column-ordered DataFrame."""
        dt = pd.DataFrame(data_list)
        dt = dt.rename(columns={'last_chg_dt': 'date', 'premium_rt': 'premium_rate'})
        # Strip the trailing '%' FIRST, then map the '-' placeholder to '0'.
        # (The previous order -- replace '-' with '0', then drop the last
        # character -- turned every '-' into an empty string, which survived
        # both replace('-', 0) and fillna(0) and corrupted the data.)
        for col in ('premium_rate', 'ytm_rt'):
            dt[col] = dt[col].str.rstrip('%').replace('-', '0')
        dt = dt.replace('-', 0)  # remaining placeholder dashes in other columns
        dt = dt.fillna(0)
        return dt[Spider_jsl_bond_his_parser._COLUMNS]

    def parse(self, raw_data):
        """fetch_one already returns the final (bond_id, DataFrame); pass through."""
        return raw_data
