#coding=utf-8
import requests
from lxml import etree
import json
import pandas as pd
from bs4 import BeautifulSoup
# Printing config: show every DataFrame column and keep wide frames on a
# single line instead of wrapping, so the summary tables print readably.
pd.set_option('display.max_columns', None)
pd.set_option('expand_frame_repr',False)



def transform_dict(label: str, old_dict: dict, new_dict: dict) -> None:
    """Copy ``old_dict[label]`` into ``new_dict``, normalizing blanks to '0'.

    The scraped records sometimes carry empty strings (and may omit keys
    entirely) for numeric fields; both cases are mapped to the string '0'
    so downstream DataFrames stay uniform.

    :param label: key to copy.
    :param old_dict: source record scraped from the page.
    :param new_dict: destination dict, mutated in place.
    :return: None.
    """
    # .get with "" default also covers records missing the key outright,
    # where the original subscript access would raise KeyError.
    value = old_dict.get(label, "")
    new_dict[label] = value if value != "" else '0'

def gethtml(url, timeout=10):
    """Fetch *url* and return its body decoded as UTF-8 text.

    :param url: page to download.
    :param timeout: seconds before the request is aborted (keyword added
        with a default, so existing callers are unaffected). Without it,
        a stalled connection would hang the script indefinitely.
    :return: response body as str.
    :raises requests.HTTPError: on a 4xx/5xx status, instead of silently
        handing an error page to the parser.
    """
    headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36"}
    res = requests.get(url, headers=headers, timeout=timeout)
    res.raise_for_status()
    # Force UTF-8 decoding; the server's charset header is not trusted here.
    res.encoding = 'UTF-8'
    return res.text


def xpath_json(res: str) -> dict:
    """Extract the embedded JSON config from the Baidu epidemic page.

    The page ships its data as a JSON blob inside
    ``<script id="captain-config">``; locate it via XPath and parse it.

    :param res: raw HTML of the page.
    :return: parsed JSON payload as a dict.
    :raises IndexError: if the captain-config script tag is absent.
    """
    html = etree.HTML(res)
    script_text = html.xpath("//script[@id='captain-config']/text()")[0]
    # Stray single quotes would break json.loads, so strip them first.
    # json.loads accepts str directly — no need to round-trip through bytes.
    return json.loads(script_text.replace('\'', ''))

def save_jsonfile(json_list: dict) -> None:
    """Persist *json_list* to ``JsonData.json`` as pretty-printed UTF-8 JSON.

    :param json_list: payload to serialize (keeps non-ASCII characters as-is).
    :return: None.
    """
    serialized = json.dumps(json_list, ensure_ascii=False, indent=4)
    with open('JsonData.json', 'w', encoding='utf-8') as out_file:
        out_file.write(serialized)

def get_data() -> list:
    '''
    Scrape Baidu's epidemic page and assemble four DataFrames.

    :return: a four-element list:
            summaryDataIn: nationwide (domestic) summary figures
            summaryDataOut: overseas summary figures
            summaryProvince: one row per domestic province
            summaryCity: one row per domestic city
    '''
    url = "https://voice.baidu.com/act/newpneumonia/newpneumonia/?from=osari_aladin_banner"
    page = gethtml(url)
    component = xpath_json(page)['component'][0]

    # Summary blocks arrive as flat dicts; pivot them into single-row frames.
    summaryDataIn = pd.DataFrame.from_dict(component['summaryDataIn'], orient='index', columns=['data']).T
    summaryDataOut = pd.DataFrame.from_dict(component['summaryDataOut'], orient='index', columns=['data']).T

    # Fields to pull from each province record and each city record.
    province_label = ['confirmed', 'died', 'crued', 'confirmedRelative', 'diedRelative', 'curedRelative',
                      'asymptomaticRelative', 'asymptomaticLocalRelative', 'asymptomatic', 'nativeRelative',
                      'screeningPositive', 'curAsymptomaticLocalRelative', 'curConfirm', 'curConfirmRelative',
                      'noNativeRelativeDays', 'overseasInputRelative', 'icuDisable', 'area']
    city_label = ['city', 'confirmed', 'died', 'crued', 'asymptomaticRelative', 'confirmedRelative',
                  'asymptomaticLocalRelative', 'asymptomatic', 'nativeRelative', 'screeningPositive',
                  'curAsymptomaticLocalRelative', 'updateTime', 'curConfirm', 'noNativeRelativeDays']

    province_rows = []
    city_rows = []
    # Each caseList entry is a province; its subList holds that province's cities.
    for province in component['caseList']:
        province_row = {}
        for key in province_label:
            transform_dict(key, province, province_row)
        province_rows.append(province_row)
        for city in province['subList']:
            city_row = {}
            for key in city_label:
                transform_dict(key, city, city_row)
            city_rows.append(city_row)

    summaryProvince = pd.DataFrame(province_rows)
    summaryCity = pd.DataFrame(city_rows)

    return [summaryDataIn, summaryDataOut, summaryProvince, summaryCity]

def get_baidu_hot():
    """Scrape Baidu's realtime hot-search board.

    :return: list of strings, each a hot-search keyword immediately followed
        by its heat index, with inner newlines removed.
    """
    url = "https://top.baidu.com/board?tab=realtime"
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36',
    }
    # timeout added so a stalled connection cannot hang the script.
    res = requests.get(url, headers=headers, timeout=10)
    soup = BeautifulSoup(res.text, features="html.parser")
    kw = soup.select("div.c-single-text-ellipsis")      # keyword cells
    count = soup.select("div.hot-index_1Bl1a")          # heat-index cells
    context = []
    # zip stops at the shorter list; the original indexed count[i] for every
    # kw[i] and raised IndexError whenever the two selectors disagreed in length.
    for kw_tag, count_tag in zip(kw, count):
        k = kw_tag.text.strip()  # strip surrounding whitespace
        v = count_tag.text.strip()
        context.append(f"{k}{v}".replace('\n', ''))
    return context


if __name__ == '__main__':
    # Live-scrape path, kept disabled — enable to refresh from the network:
    # total_list = get_data()
    # for i in range(4):
    #     print(total_list[i],end='\n\n')
    # Offline path: inspect the previously cached JSON dump instead.
    with open('JsonData.json', "r", encoding='utf-8') as fp:
        cached = json.load(fp)
    print(cached['component'][0])