import json
import os
import time

import requests
import pandas as pd
from fake_useragent import UserAgent


class NetEaseNews(object):
    """Scraper for the NetEase (163.com) real-time COVID-19 data API.

    Fetches the ``list-total`` JSON payload once at construction time and
    exposes parsers for the national history, per-country, per-province and
    per-city sections of that payload.
    """

    # Metric field order for a single record. NOTE(review): the original code
    # reads "input"/"storeConfirm" in a DIFFERENT order for today vs. total;
    # that ordering is preserved here because it defines the CSV column order.
    _TODAY_FIELDS = ("confirm", "suspect", "heal", "dead",
                     "severe", "storeConfirm", "input")
    _TOTAL_FIELDS = ("confirm", "suspect", "heal", "dead",
                     "severe", "input", "storeConfirm")

    def __init__(self):
        # NetEase real-time epidemic bulletin endpoint.
        self.url = "https://c.m.163.com/ug/api/wuhan/app/data/list-total"
        # Random User-Agent per run to look like a regular browser.
        self.headers = {'User-Agent': UserAgent().random}
        # Parsed "data" section of the API response (None on fetch failure).
        self.data = self.get_html()

    def get_html(self):
        """Fetch the API endpoint and return its parsed ``data`` payload.

        :return: the ``"data"`` dict of the JSON response, or ``None`` when
                 the request or JSON parsing fails (the error is printed).
        """
        try:
            r = requests.get(url=self.url, headers=self.headers, timeout=3)
            r.encoding = r.apparent_encoding
            status = r.status_code
            json_data = json.loads(r.text)
            if json_data is not None:
                print("爬虫状态码: " + str(status))
            return json_data["data"]
        except Exception as e:
            print(e)
            # Explicit: callers must be prepared for a failed fetch.
            return None

    def parser_common(self, value):
        """Extract the shared per-record metrics from one data entry.

        :param value: a single record dict containing ``"today"`` and
                      ``"total"`` sub-dicts.
        :return: list of 14 numbers — the 7 "today" metrics followed by the
                 7 "total" metrics, in ``_TODAY_FIELDS``/``_TOTAL_FIELDS``
                 order. Missing fields default to 0 (but a field explicitly
                 present as None is returned as None, as in the original).
        """
        today = value["today"]
        total = value["total"]
        return ([today.get(field, 0) for field in self._TODAY_FIELDS] +
                [total.get(field, 0) for field in self._TOTAL_FIELDS])

    def clean_format(self, data):
        """Build a DataFrame from ``data`` with missing values set to 0.

        :param data: list of row lists.
        :return: ``pandas.DataFrame`` with None/NaN replaced by 0.
        """
        df = pd.DataFrame(data)
        # Bug fix: the original discarded the fillna() result (and had it
        # commented out), so None values were never actually converted to 0.
        return df.fillna(0)

    def save_to_csv(self, data, name):
        """Save a DataFrame to ``<name>_<YYYY_MM_DD>.csv`` and report status.

        :param data: DataFrame to persist.
        :param name: file-name prefix.
        """
        file_name = name + "_" + time.strftime("%Y_%m_%d", time.localtime(time.time())) + ".csv"

        # utf_8_sig adds a BOM so Excel opens the Chinese text correctly.
        data.to_csv(file_name, index=False, encoding="utf_8_sig")

        # Confirm the file actually landed on disk.
        if os.path.exists(file_name):
            print(file_name + " 保存成功")
        else:
            print('保存失败')

    def parser_chinaDayList(self):
        """Parse the day-by-day national (China) history.

        :return: list of rows ``[date, *14 metrics]``.
        """
        detail_list = []
        for day in self.data["chinaDayList"]:
            # Bug fix: the original did `date + common_data`, which is
            # str + list and raises TypeError; the date must be wrapped
            # in a list before concatenation.
            detail_list.append([day["date"]] + self.parser_common(day))

        return detail_list

    def parser_chinaTotal(self):
        """Parse today's national (China) totals.

        :return: list of the 14 common metrics.
        """
        return self.parser_common(self.data["chinaTotal"])

    def parser_world(self):
        """Parse per-country world data.

        :return: list of rows ``[name, lastUpdateTime, *14 metrics]``.
        """
        detail_list = []
        for country in self.data["areaTree"]:
            row = [country["name"], country["lastUpdateTime"]]
            detail_list.append(row + self.parser_common(country))

        return detail_list

    def parser_china_province(self):
        """Parse per-province data for China.

        :return: list of rows ``[province, lastUpdateTime, *14 metrics]``.
        """
        detail_list = []
        # NOTE(review): assumes China is the 3rd entry of areaTree — the API
        # ordering is not guaranteed; verify before relying on this index.
        for province in self.data["areaTree"][2]["children"]:
            row = [province["name"], province["lastUpdateTime"]]
            detail_list.append(row + self.parser_common(province))

        return detail_list

    def parser_china_city(self):
        """Parse per-city data for China and save it to a dated CSV.

        :return: cleaned ``pandas.DataFrame`` of
                 ``[province, city, lastUpdateTime, *14 metrics]`` rows.
        """
        detail_list = []
        # NOTE(review): same hard-coded China index as parser_china_province.
        for province in self.data["areaTree"][2]["children"]:
            province_name = province["name"]
            for city in province["children"]:
                row = [province_name, city["name"], city["lastUpdateTime"]]
                detail_list.append(row + self.parser_common(city))

        detail_list = self.clean_format(detail_list)
        self.save_to_csv(detail_list, "china_city")

        return detail_list


if __name__ == '__main__':
    # Bug fix: the original did `NetEaseNews = NetEaseNews()`, rebinding the
    # class name to an instance and shadowing the class for the rest of the
    # module. Use a distinct variable name for the instance.
    spider = NetEaseNews()
    china_city_data = spider.parser_china_city()
