# -*- coding: utf-8 -*-
# @Time: 2022/1/28 13:31
# @Author: Foxhuty
# @File: spider_01.py
# @Software: PyCharm
# @Based on python 3.9
import json
import re
import time
from concurrent.futures import ThreadPoolExecutor
from functools import wraps

import requests
from bs4 import BeautifulSoup
from tqdm import tqdm


class CoronaVirusSpider(object):
    """Scrape COVID-19 statistics from the dxy.cn dashboard and save them as JSON files."""

    def __init__(self):
        # Landing page whose inline <script> tags embed the JSON payloads.
        self.home_url = 'https://ncov.dxy.cn/ncovh5/view/pneumonia'

    @staticmethod
    def get_content_from_url(url, timeout=30):
        """Download *url* and return the decoded response body.

        :param url: page to fetch
        :param timeout: seconds before the request is aborted (default 30,
            so a dead endpoint cannot hang the whole crawl)
        :return: response body as text
        :raises requests.HTTPError: on a non-2xx status code
        """
        response = requests.get(url, timeout=timeout)
        response.raise_for_status()  # fail loudly instead of parsing an error page
        return response.content.decode()

    @staticmethod
    def parse_home_page(home_page, html_id):
        """Extract the JSON array embedded in the <script id=html_id> tag.

        :param home_page: HTML text of the dashboard page
        :param html_id: id attribute of the script tag holding the data
        :return: the array parsed into a Python list
        """
        soup = BeautifulSoup(home_page, 'lxml')
        script = soup.find(id=html_id)
        # The tag body is JavaScript that assigns a JSON array; grab the
        # bracketed array literal out of it.
        json_str = re.search(r'\[.+\]', script.text).group()
        return json.loads(json_str)

    @staticmethod
    def save(data, path):
        """Serialize *data* to *path* as UTF-8 JSON (non-ASCII kept readable)."""
        with open(path, 'w', encoding='utf-8') as fp:
            json.dump(data, fp, ensure_ascii=False)

    def crawl_last_day_corona_virus(self):
        """Crawl the most recent per-country statistics and save them to disk."""
        home_page = self.get_content_from_url(self.home_url)
        last_day_corona_virus = self.parse_home_page(
            home_page, "getListByCountryTypeService2true")
        self.save(last_day_corona_virus, './data/last_day_corona_virus.json')

    def crawl_last_day_corona_virus_china(self):
        """Crawl the most recent per-province (China) statistics and save them."""
        home_page = self.get_content_from_url(self.home_url)
        data = self.parse_home_page(home_page, "getAreaStat")
        self.save(data, 'data/last_day_corona_virus_china.json')

    def crawl_corona_virus(self):
        """Collect each country's day-by-day history (since Jan 23) and save it."""
        # 1. Load the per-country snapshot (holds the per-country history URLs).
        last_day_corona_virus = self.load_virus_data('data/last_day_corona_virus.json')
        # 2. Follow every country's statistics URL and flatten the results.
        corona_virus_list = self.get_virus_json_data(
            last_day_corona_virus, '各国疫情数据采集中')
        # 3. Persist the combined list as JSON.
        self.save(corona_virus_list, 'data/corona_virus.json')

    def get_virus_json_data(self, data, utc):
        """Follow each region's 'statisticsData' URL and merge the daily records.

        :param data: list of region dicts, each carrying a 'statisticsData' URL,
            a 'provinceName', and (for countries) a 'countryShortCode'
        :param utc: caption shown on the tqdm progress bar
        :return: flat list of daily records tagged with the region's name/code
        """
        corona_virus_list = []
        for country in tqdm(data, utc):
            statistics_data_url = country['statisticsData']
            statistics_data_json_str = self.get_content_from_url(statistics_data_url)
            statistics_data = json.loads(statistics_data_json_str)['data']
            for one_day in statistics_data:
                one_day['provinceName'] = country['provinceName']
                # BUG FIX: look the code up on the *source* region dict. The old
                # check read one_day, which never carries the key at this point,
                # so country codes were silently never attached.
                if country.get('countryShortCode'):
                    one_day['countryShortCode'] = country['countryShortCode']
            corona_virus_list.extend(statistics_data)
        return corona_virus_list

    @staticmethod
    def load_virus_data(path):
        """Load and return previously saved JSON data from *path*."""
        with open(path, encoding='utf-8') as fp:
            return json.load(fp)

    def crawl_corona_virus_china(self):
        """Collect each Chinese province's day-by-day history and save it."""
        last_day_corona_virus_china = self.load_virus_data(
            'data/last_day_corona_virus_china.json')
        corona_virus_list = self.get_virus_json_data(
            last_day_corona_virus_china, '各省疫情数据采集中')
        self.save(corona_virus_list, 'data/corona_virus_china.json')

    def run(self):
        """Run all four crawl jobs and wait for them to finish.

        Uses its own executor instead of the module-level ``pool`` global,
        which only exists when the file is executed as a script (the old
        code raised NameError when this class was imported elsewhere).
        """
        with ThreadPoolExecutor() as executor:
            # The history crawls read the JSON files the last-day crawls
            # write, so run the two stages sequentially; jobs within a
            # stage still overlap.
            stages = (
                (self.crawl_last_day_corona_virus,
                 self.crawl_last_day_corona_virus_china),
                (self.crawl_corona_virus,
                 self.crawl_corona_virus_china),
            )
            for stage in stages:
                for job in [executor.submit(task) for task in stage]:
                    job.result()  # re-raise any exception from the worker here

    @staticmethod
    def use_time(f):
        """Decorator: print how long the wrapped callable took, then return its result."""
        @wraps(f)  # preserve the wrapped function's name/docstring
        def wrapper(*args, **kwargs):
            t1 = time.time()
            results = f(*args, **kwargs)
            t2 = time.time()
            print(f'主程序{f.__name__}用时{(t2 - t1):.1f}秒')
            print(f'程序运行结束！')
            return results

        return wrapper


if __name__ == '__main__':
    # Shared executor used by CoronaVirusSpider.run() to fan out crawl jobs.
    pool = ThreadPoolExecutor()
    spider = CoronaVirusSpider()

    @spider.use_time
    def main():
        """Kick off the crawl and wait for every submitted job to finish.

        The old flow called spider.run() directly AND resubmitted it through
        the pool, crawling everything twice — and the timer measured only
        submission time because nothing waited for the workers.
        """
        spider.run()
        # Block until all outstanding tasks complete so the elapsed time
        # printed by @use_time reflects the actual crawl.
        pool.shutdown(wait=True)

    main()