import requests
import json
import csv
import time

# Domestic (China) data — legacy standalone version, deliberately disabled by
# wrapping it in the module-level triple-quoted string below. Superseded by the
# combined domestic+foreign script at the bottom of the file.
'''
now_time = int(round(time.time() * 1000))

url = 'https://c.m.163.com/ug/api/wuhan/app/data/list-total?t=' + str(now_time)

headers = {
'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.81 Mobile Safari/537.36',
}
resp = requests.get(url, headers=headers)
data = json.loads(resp.content.decode())

# 每天的列表数据
csv_data = []
for row in data['data']['chinaDayList']:
    left_confirm = row['total']['confirm'] - row['total']['heal'] - row['total']['dead']
    csv_data.append([row['date'], row['today']['confirm'], row['today']['suspect'],row['today']['heal'],row['today']['dead'],
                                  row['total']['confirm'], left_confirm, row['total']['suspect'],row['total']['heal'],row['total']['dead'],row['total']['severe']])

# 获取当天的数据
now_time = float(now_time/1000)
timeArray = time.localtime(now_time)
today = time.strftime("%Y-%m-%d", timeArray)
today_data = data['data']['chinaTotal']
left_confirm = today_data['total']['confirm'] - today_data['total']['heal'] - today_data['total']['dead']
csv_data.append([today, today_data['today']['confirm'],today_data['today']['suspect'],today_data['today']['heal'],today_data['today']['dead'],
                        today_data['total']['confirm'],left_confirm,today_data['total']['suspect'],today_data['total']['heal'],today_data['total']['dead'],today_data['total']['severe']])

# 写入文件
f = open('./data.csv', 'w', encoding='utf-8' ,newline='')
csv_writer = csv.writer(f)
csv_writer.writerow(['日期','当天新增确诊','当天新增疑似','当天新增治愈','当天新增死亡','累计确诊','现有确诊','现有疑似','累计治愈','累计死亡','现有危重病例'])
csv_writer.writerows(csv_data)
f.close()
'''

# Foreign (world) data — legacy standalone version, deliberately disabled by
# wrapping it in the module-level triple-quoted string below. Superseded by the
# combined domestic+foreign script at the bottom of the file.
'''
country_url = 'https://api.inews.qq.com/newsqa/v1/automation/foreign/country/ranklist'
headers = {
'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.81 Mobile Safari/537.36',
}
resp = requests.get(country_url, headers=headers)
data = json.loads(resp.content.decode())
country_list = []
for k in data['data']:
    country_list.append(k['name'])
  
#print(country_list)
csv_data = []
for country in country_list:
    try:
        new_url = 'https://api.inews.qq.com/newsqa/v1/automation/foreign/daily/list?country=' + country
        new_resp = requests.get(new_url, headers=headers)
        new_data = json.loads(new_resp.content.decode())

        #国家 日期 累计确诊，现有确诊，当日新增，累计死亡，累计治愈
        for row in new_data['data']:
            date = '2020/' + row['date'].replace('.', '/')
            now_confirm = row['confirm'] - row['heal'] - row['dead']
            csv_data.append([country, date, row['confirm'], now_confirm, row['confirm_add'], row['dead'], row['heal']])
    except:
        print(country)
    

# 写入文件
f = open('./world_data.csv', 'w', encoding='utf-8' ,newline='')
csv_writer = csv.writer(f)
csv_writer.writerow(['国家','日期','累计确诊','现有确诊','当天新增','累计死亡','累计治愈'])
csv_writer.writerows(csv_data)
f.close()    
'''
    
# Combine domestic (China) and foreign data into a single CSV.

# --- Domestic (China) data ---
# Millisecond timestamp: doubles as a cache-buster query param and as the
# label for today's summary row appended below.
now_time = int(round(time.time() * 1000))

url = 'https://c.m.163.com/ug/api/wuhan/app/data/list-total?t=' + str(now_time)

headers = {
'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.81 Mobile Safari/537.36',
}
# timeout so a stalled connection cannot hang the script forever
resp = requests.get(url, headers=headers, timeout=30)
resp.raise_for_status()  # fail loudly on an HTTP error page instead of feeding it to the JSON parser
data = resp.json()  # lets requests pick the charset; replaces json.loads(resp.content.decode())

# One row per day: country, date, total confirmed, currently confirmed
# (total minus healed minus dead), newly confirmed that day, total deaths,
# total healed.
csv_data = []
for row in data['data']['chinaDayList']:
    left_confirm = row['total']['confirm'] - row['total']['heal'] - row['total']['dead']
    csv_data.append(['中国', row['date'], row['total']['confirm'], left_confirm,
                     row['today']['confirm'], row['total']['dead'], row['total']['heal']])

# Append today's snapshot (chinaTotal), labelled with the local date derived
# from the same timestamp used in the request URL.
today = time.strftime("%Y-%m-%d", time.localtime(now_time / 1000))
today_data = data['data']['chinaTotal']
left_confirm = today_data['total']['confirm'] - today_data['total']['heal'] - today_data['total']['dead']
csv_data.append(['中国', today, today_data['total']['confirm'], left_confirm,
                 today_data['today']['confirm'], today_data['total']['dead'], today_data['total']['heal']])



# --- Foreign (world) data ---
# First fetch the country ranking to learn every country name, then query the
# per-country daily series one country at a time.
country_url = 'https://api.inews.qq.com/newsqa/v1/automation/foreign/country/ranklist'
headers = {
'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.81 Mobile Safari/537.36',
}
# timeout so a stalled connection cannot hang the script forever
resp = requests.get(country_url, headers=headers, timeout=30)
resp.raise_for_status()
data = resp.json()
country_list = [k['name'] for k in data['data']]

for country in country_list:
    try:
        new_url = 'https://api.inews.qq.com/newsqa/v1/automation/foreign/daily/list?country=' + country
        new_resp = requests.get(new_url, headers=headers, timeout=30)
        new_data = new_resp.json()

        # Row layout matches the domestic section: country, date, total
        # confirmed, currently confirmed, newly confirmed, total deaths,
        # total healed. API dates look like "MM.DD"; prefix the year.
        for row in new_data['data']:
            date = '2020/' + row['date'].replace('.', '/')
            now_confirm = row['confirm'] - row['heal'] - row['dead']
            csv_data.append([country, date, row['confirm'], now_confirm,
                             row['confirm_add'], row['dead'], row['heal']])
    except (requests.RequestException, ValueError, KeyError, TypeError) as exc:
        # Best-effort per country: skip missing/malformed data, but keep the
        # reason visible instead of silently printing only the country name.
        print(f'failed to fetch {country}: {exc}')

# Write the combined rows to CSV. newline='' prevents blank lines on Windows;
# the with-statement guarantees the file is flushed and closed even if a
# write fails partway through.
with open('./all_world_data.csv', 'w', encoding='utf-8', newline='') as f:
    csv_writer = csv.writer(f)
    # Header: country, date, total confirmed, currently confirmed,
    # newly confirmed that day, total deaths, total healed.
    csv_writer.writerow(['国家','日期','累计确诊','现有确诊','当天新增','累计死亡','累计治愈'])
    csv_writer.writerows(csv_data)
