import ast
import base64
import datetime
import hashlib
import json
import re
import time

from parsel import Selector

from re_common.vip.baseencodeid import BaseLngid

from apps.crawler_platform.core_platform.core_sql import CoreSqlValue
from apps.crawler_platform.core_platform.g_model import DealModel, CallBackModel, JournalListModel, JournalIssueModel, \
    JournalArticleModel, DealInsertModel, JournalHomeModel, EtlDealModel, DealUpdateModel, OtherListModel, \
    OtherArticleModel, EtlCallBackModel, OtherArticleEtlModel, T1

__all__ = [
    "cnkictbd_cnkictbdlist_callback",
    "cnkictbd_cnkictbdarticle_callback",
    "cnkictbd_cnkictbdarticle_etl_callback",

]

def deal_sql_dict(sql_dict):
    """Strip platform bookkeeping columns from a task-row dict in place.

    Removes the framework-managed fields (primary key, timestamps, state
    and error bookkeeping) so the remaining keys can be re-inserted as a
    fresh task row.

    :param sql_dict: task row as a plain dict (mutated in place)
    :return: the same, mutated dict
    """
    for key in ("id", "update_time", "create_time", "null_dicts",
                "err_msg", "other_dicts", "state", "failcount"):
        # pop with default: tolerate rows that lack a bookkeeping column
        # (the original hard pop raised KeyError on any missing key)
        sql_dict.pop(key, None)
    return sql_dict


def cnkictbd_cnkictbdlist_callback(callmodel: CallBackModel[OtherListModel]) -> DealModel:
    """Handle one CNKI CTBD list page.

    On page 1, reads the total page count from the embedded HTML and fans
    out list tasks for pages 2..N (batched at 10000 rows per insert model).
    On every page, emits one next-stage article task per list row.

    The crawled payload (``para_dicts['data']['1_1']['html']``) is a
    Python-literal string of the form ``[row_dicts, html_fragment]`` with
    JSON ``null`` tokens embedded; those are normalized to ``""`` before
    parsing.

    :param callmodel: platform callback context for the current list task
    :return: DealModel carrying the rows to insert
    """
    result = DealModel()
    para_dicts = callmodel.para_dicts
    task_info = callmodel.redis_all.parse_dict["1_1"].task_info
    info_dicts = {
        "task_name": callmodel.sql_model.task_name,
        "task_tag": callmodel.sql_model.task_tag,
        "task_tag_next": task_info.task_tag_next,
    }
    data = para_dicts['data']['1_1']["html"]

    # Parse once (the original parsed the same string twice).
    # ast.literal_eval only accepts Python literals, unlike eval(),
    # which could execute arbitrary code embedded in the crawled response.
    parsed = ast.literal_eval(data.replace("null", '""'))

    page_index = int(callmodel.sql_model.page_index)
    if page_index == 1:
        di_model_bef = DealInsertModel()
        di_model_bef.insert_pre = CoreSqlValue.insert_ig_it
        # parsed[1] is an HTML fragment holding the paging controls.
        sel = Selector(parsed[1])
        total_page = sel.css("#hidTotalPage::attr(value)").get()
        sql_dict = deal_sql_dict(callmodel.sql_model.dict())
        for page in range(2, int(total_page) + 1):
            # NOTE(review): "page" stores the page *count* string, not the
            # page number — looks intentional downstream, confirm.
            sql_dict["page"] = total_page
            sql_dict["page_index"] = page
            di_model_bef.lists.append(sql_dict.copy())
            if len(di_model_bef.lists) > 10000:
                result.befor_dicts.insert.append(di_model_bef)
                di_model_bef = DealInsertModel()
                di_model_bef.insert_pre = CoreSqlValue.insert_ig_it
        result.befor_dicts.insert.append(di_model_bef)

    # Every page: queue one next-stage (article) task per list row.
    di_model_next = DealInsertModel()
    di_model_next.insert_pre = CoreSqlValue.insert_ig_it
    for li in parsed[0]:
        temp = info_dicts.copy()
        temp["task_tag"] = temp["task_tag_next"]
        del temp["task_tag_next"]
        temp["rawid"] = li["tableId"]
        temp["article_json"] = json.dumps({"fileName": li["fileName"]}, ensure_ascii=False)
        di_model_next.lists.append(temp)
    result.next_dicts.insert.append(di_model_next)
    return result


def cnkictbd_cnkictbdarticle_callback(callmodel: CallBackModel[OtherArticleModel]) -> DealModel:
    """No-op article callback: nothing to stage, return an empty DealModel."""
    return DealModel()


def cnkictbd_cnkictbdarticle_etl_callback(callmodel: EtlCallBackModel[OtherArticleEtlModel, T1]) -> EtlDealModel:
    """ETL one CNKI CTBD article: parse metadata from the "1_1" payload and
    the page image from the "1_2" payload, then stage a single row for the
    ``cnkictbdimage_latest`` table.

    Sets ``status="FAILED"``/``code=7`` when either expected payload key
    ("1_1" or "1_2") is missing.

    :param callmodel: platform ETL callback context
    :return: EtlDealModel with ``save_data`` (and ``other_info`` image blob)
    """
    result = EtlDealModel()
    para_dicts_before = callmodel.para_dicts
    sql_model = callmodel.sql_model
    rawid = sql_model.rawid
    # Drop upstream bookkeeping we never persist; tolerate its absence
    # (the original `del` raised KeyError when the key was missing).
    para_dicts_before.pop("journal_rawids", None)

    def deal(value):
        """Join a list of strings into one (dropping CR/LF and blanks);
        pass non-list values through unchanged."""
        if not isinstance(value, list):
            return value
        cleaned = ""
        for val in value:
            val = val.replace("\r", "").replace("\n", "").strip()
            if val != "":
                cleaned += val
        return cleaned

    para_dicts = para_dicts_before["data"]
    latest_data = {}

    if "1_1" not in para_dicts:
        result.status = "FAILED"
        result.code = 7
        result.err_msg = "规则解析错误 cnkictbd 1_1 不存在"
        return result

    # Payload is a Python-literal list: [metadata_dict, related_image_rows].
    # Normalize JSON "null" to "" first; ast.literal_eval is the safe
    # replacement for eval() on crawled text (literals only, no code).
    result_1_1_list = ast.literal_eval(para_dicts["1_1"]["html"].replace("null", '""'))
    info_data = result_1_1_list[0]
    latest_data["image_title"] = deal(info_data["title"])  # image name/title
    latest_data["image_pianming"] = deal(info_data["pianming"])
    latest_data["image_url"] = "https://ctbd.cnki.net/Home/ShowImage?tableID=" + rawid
    latest_data["image_biaozhu"] = deal(info_data["biaozhu"])
    latest_data["article_rawid"] = deal(info_data["fileName"])
    latest_data["organ"] = deal(info_data["jigou"])  # original assigned "organ" twice; kept once
    latest_data["pub_date"] = deal(info_data["publishDate"])
    latest_data["num"] = deal(info_data["qi"])
    latest_data["pub_year"] = deal(info_data["year"])
    latest_data["image_tablejiexi"] = deal(info_data["tablejiexi"])
    latest_data["ztzlmcode"] = deal(info_data["ztzlmcode"])
    latest_data["author"] = deal(info_data["author"])
    latest_data["downcount"] = deal(info_data["downcount"])
    latest_data["unit"] = deal(info_data["unit"])
    latest_data["collecttype"] = deal(info_data["collectType"])
    latest_data["excelurl"] = deal(info_data["excelUrl"])
    latest_data["collguid"] = deal(info_data["collGuid"])
    latest_data["collecttime"] = deal(info_data["CollectTime"])

    # Related image rows: keep (tableId, title) pairs plus the raw rows.
    info_data_list = result_1_1_list[1]
    latest_data["list_imageid"] = [(one["tableId"], one["title"]) for one in info_data_list]
    latest_data["list_image_info"] = list(info_data_list)

    if "1_2" not in para_dicts:
        result.status = "FAILED"
        result.code = 7
        result.err_msg = "规则解析错误 cnkictbd 1_2 不存在"
        return result

    img_str = para_dicts["1_2"]["image_base64"]
    img_data = base64.b64decode(img_str.encode("utf-8"))
    from apps.crawler_platform.util.mmh3encode import normFileName_image, generateHashName
    file_name = normFileName_image(rawid)
    file_path = generateHashName(file_name)
    minio_path = "pic/cnkictbdimage/" + file_path + "/" + file_name
    result.other_info = {"image": {"path": minio_path, "image_data": img_data}}
    latest_data["image_path"] = minio_path
    latest_data["image_size"] = len(img_data)
    latest_data["image_data_base64"] = img_str
    latest_data["image_hash_md5"] = hashlib.md5(img_data).hexdigest()

    latest_data["_id"] = sql_model.rawid
    latest_data["update_time"] = str(datetime.datetime.now())
    result.save_data = [{"table": "cnkictbdimage_latest", "data": latest_data}]
    return result