'''
  Example 9.19: Use the custom spider module easySpider to fetch Tencent's
  global COVID-19 statistics and write them to a file.
'''

# 1. Import the easySpider module and the json module.
import easySpider
import json

# 2. Instantiate the spider and fetch the raw JSON payload as a string.
url = 'https://api.inews.qq.com/newsqa/v1/automation/modules/list?modules=FAutoGlobalStatis,FAutoContinentStatis,FAutoGlobalDailyList,FAutoCountryConfirmAdd'
tx_data = easySpider.Spider(url=url)
result_str = tx_data.requests_getHtmlSource()

# 3. Parse the JSON string into a Python dict.
result_dict = json.loads(result_str)

# 4. Write the global daily statistics, one space-separated record per line.
#    Single source of truth for the column order: the header and each data
#    row are both derived from this tuple, so they cannot drift apart.
FIELDS = ('date', 'confirm', 'dead', 'heal', 'newAddConfirm',
          'deadRate', 'healRate')

# NOTE(review): 'Gloabl' looks like a typo for 'Global', but the filename is
# kept unchanged so existing consumers of the output file keep working.
# mode='w' (not 'w+'): the file is only written, never read back.
# encoding is pinned so output does not depend on the platform default.
with open('txGloablDailyRecord.dat', mode='w', encoding='utf-8') as f:
    # Header line first.
    f.write(' '.join(FIELDS) + '\n')
    # One line per daily record; 'date' lives on the item itself, the
    # numeric fields live under item['all'].
    for item in result_dict['data']['FAutoGlobalDailyList']:
        daily = item['all']
        row = [item['date']] + [str(daily[key]) for key in FIELDS[1:]]
        f.write(' '.join(row) + '\n')