import requests
import pymysql
import pandas as pd
from bs4 import BeautifulSoup
import re
class YQ():
    """Scraper for China COVID-19 statistics from the DXY epidemic page.

    Downloads the page at ``self.url``, extracts the JSON payload embedded
    in the ``<script id="getAreaStat">`` tag with regular expressions, and
    stores the city-level "currentConfirmedCount" values in
    ``self.all_confirm`` (a pandas Series of strings).
    """

    def __init__(self):
        self.url = "https://ncov.dxy.cn/ncovh5/view/pneumonia"
        # BUG FIX: the key was 'User_Agent' (underscore). HTTP header names
        # use hyphens, so the UA string was previously sent under a bogus
        # header name and ignored by the server.
        self.header = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36'}
        # NOTE(review): connecting in __init__ means constructing YQ requires
        # a reachable MySQL server; consider connecting lazily in SaveData.
        self.conn = pymysql.connect(host="localhost", port=3306, database='yq', user='root', password="")
        # Populated by GetData(); starts as 0 until data has been fetched.
        self.all_confirm = 0

    def GetData(self):
        """Fetch the page and populate ``self.all_confirm`` and ``self.de``.

        Side effects only (returns None):
          * ``self.all_confirm`` — pandas Series of city-level
            "currentConfirmedCount" values (as strings, in page order).
          * ``self.de`` — str() rendering of that Series.
        """
        # Province-level field patterns. The city-level fields have exactly
        # the same shape, so the same compiled patterns are reused for both
        # passes instead of compiling a duplicate "_1" copy of each regex
        # (the original duplicates were byte-identical).
        provinceName_re = re.compile(r'"provinceName":"(.*?)",')
        provinceShortName_re = re.compile(r'"provinceShortName":"(.*?)",')
        currentConfirmedCount_re = re.compile(r'"currentConfirmedCount":(.*?),')
        confirmedCount_re = re.compile(r'"confirmedCount":(.*?),')
        suspectedCount_re = re.compile(r'"suspectedCount":(.*?),')
        curedCount_re = re.compile(r'"curedCount":(.*?),')
        deadCount_re = re.compile(r'"deadCount":(.*?),')
        comment_re = re.compile(r'"comment":"(.*?)",')
        locationId_re = re.compile(r'"locationId":(.*?),')
        statisticsData_re = re.compile(r'"statisticsData":"(.*?)",')
        # Matches each province's nested "cities":[{...}] array.
        cities_re = re.compile(r'"cities":\[\{(.*?)\}\]')
        cityName_re = re.compile(r'"cityName":"(.*?)",')

        # Fetch the page and locate the <script id="getAreaStat"> payload.
        datas = requests.get(self.url, headers=self.header)
        datas.encoding = 'utf-8'
        soup = BeautifulSoup(datas.text, 'lxml')
        data = str(soup.find_all('script', {'id': 'getAreaStat'}))
        # NOTE(review): brittle — strips the fixed-length script-tag wrapper
        # by character position; breaks if the site changes its markup.
        data_str = data[54:-23]

        # Blank out the nested city arrays first so the province-level
        # regexes below do not also match the identically named city fields.
        citiess = re.sub(cities_re, '8888', data_str)

        # Province-level data (most of these feed only the commented-out
        # DataFrame columns below; kept for parity with the original).
        provinceNames = re.findall(provinceName_re, citiess)
        provinceShortNames = re.findall(provinceShortName_re, citiess)
        currentConfirmedCounts = re.findall(currentConfirmedCount_re, citiess)
        confirmedCounts = re.findall(confirmedCount_re, citiess)
        suspectedCounts = re.findall(suspectedCount_re, citiess)
        curedCounts = re.findall(curedCount_re, citiess)
        deadCounts = re.findall(deadCount_re, citiess)
        comments = re.findall(comment_re, citiess)
        locationIds = re.findall(locationId_re, citiess)
        statisticsDatas = re.findall(statisticsData_re, citiess)

        # City-level data: pull the nested arrays out of the ORIGINAL text,
        # stringify the resulting list, and rerun the shared field patterns.
        citiess_str = str(re.findall(cities_re, data_str))
        cityName = re.findall(cityName_re, citiess_str)
        currentConfirmedCount_1 = re.findall(currentConfirmedCount_re, citiess_str)
        confirmedCount_1 = re.findall(confirmedCount_re, citiess_str)
        suspectedCount_1 = re.findall(suspectedCount_re, citiess_str)
        curedCount_1 = re.findall(curedCount_re, citiess_str)
        deadCount_1 = re.findall(deadCount_re, citiess_str)

        # Province-level data as a dict of pandas Series (currently unused;
        # the other columns are intentionally commented out).
        df = {
            # '地区代码':pd.Series(locationIds),
            '新增确诊人数': pd.Series(currentConfirmedCounts)
            # '省区短名':pd.Series(provinceShortNames),
            # '当前确诊':pd.Series(currentConfirmedCounts),
            # '累计确诊':pd.Series(confirmedCounts),
            # '疑似确诊':pd.Series(suspectedCounts),
            # '治愈人数':pd.Series(curedCounts),
            # '死亡人数':pd.Series(deadCounts),
            # '评论':pd.Series(comments),
            # '统计数据区':pd.Series(statisticsDatas),
        }
        # City-level current-confirmed counts; values are strings as matched.
        self.all_confirm = pd.Series(currentConfirmedCount_1)
        self.de = str(self.all_confirm)

    def SaveData(self):
        # Persistence is currently handled by the __main__ driver below.
        pass
if __name__ == "__main__":
    yq = YQ()
    yq.GetData()
    print(yq.all_confirm[0])
    cs = yq.conn.cursor()
    try:
        # BUG FIX: `sql` was assigned three times (indices 0, 19, 21) but
        # only the last assignment was ever executed, silently dropping two
        # of the three intended INSERTs. Also switched from str.format
        # interpolation (SQL-injection prone) to a parameterized query.
        sql = "INSERT INTO echarts_allinfo(confirm) VALUES (%s)"
        for idx in (0, 19, 21):
            cs.execute(sql, (yq.all_confirm[idx],))
        yq.conn.commit()
    finally:
        # Close cursor/connection even if an execute fails.
        cs.close()
        yq.conn.close()
# NOTE(review): the four triple-quoted blocks below are DEAD CODE kept as
# module-level string literals (earlier experiments: a Django-oriented
# scraper, a Django DB helper, an international-stats scraper, and a
# 163.com scraper). Python evaluates and immediately discards them at
# import time. Consider deleting them — version control already preserves
# the history. String contents are left byte-for-byte untouched here.
'''import requests
from django.http import JsonResponse
import json
import pandas as pd
from bs4 import BeautifulSoup
import re
import pymysql

from django.http import HttpResponse
url = 'https://ncov.dxy.cn/ncovh5/view/pneumonia'
headers = {
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36'
    }
#省级正则表达式
provinceName_re = re.compile(r'"provinceName":"(.*?)",')
provinceShortName_re = re.compile(r'"provinceShortName":"(.*?)",')
currentConfirmedCount_re = re.compile(r'"currentConfirmedCount":(.*?),')
confirmedCount_re = re.compile(r'"confirmedCount":(.*?),')
suspectedCount_re = re.compile(r'"suspectedCount":(.*?),')
curedCount_re = re.compile(r'"curedCount":(.*?),')
deadCount_re = re.compile(r'"deadCount":(.*?),')
comment_re = re.compile(r'"comment":"(.*?)",')
locationId_re = re.compile(r'"locationId":(.*?),')
statisticsData_re = re.compile(r'"statisticsData":"(.*?)",')
cities_re = re.compile(r'"cities":\[\{(.*?)\}\]')

#市级正则表达式
cityName_re = re.compile(r'"cityName":"(.*?)",')
currentConfirmedCount_1_re = re.compile(r'"currentConfirmedCount":(.*?),')
confirmedCount_1_re = re.compile(r'"confirmedCount":(.*?),')
suspectedCount_1_re = re.compile(r'"suspectedCount":(.*?),')
curedCount_1_re = re.compile(r'"curedCount":(.*?),')
deadCount_1_re = re.compile(r'"deadCount":(.*?),')
locationId_1_re = re.compile(r'"locationId":(.*?)\},')

#爬虫爬取数据
datas = requests.get(url,headers = headers)
datas.encoding = 'utf-8'
soup = BeautifulSoup(datas.text,'lxml')
data = soup.find_all('script',{'id':'getAreaStat'})
data = str(data)
data_str = data[54:-23]

#替换字符串内容，避免重复查找
citiess = re.sub(cities_re,'8888',data_str)
#查找省级数据
provinceNames = re.findall(provinceName_re,citiess)
provinceShortNames = re.findall(provinceShortName_re,citiess)
currentConfirmedCounts = re.findall(currentConfirmedCount_re,citiess)
confirmedCounts = re.findall(confirmedCount_re,citiess)
suspectedCounts = re.findall(suspectedCount_re,citiess)
curedCounts = re.findall(curedCount_re,citiess)
deadCounts = re.findall(deadCount_re,citiess)
comments = re.findall(comment_re,citiess)
locationIds = re.findall(locationId_re,citiess)
statisticsDatas = re.findall(statisticsData_re,citiess)


#查找市级数据
citiess_str1 = re.findall(cities_re,data_str)
#将市级列表数据转为字符串，方便正则表达式查找
citiess_str = str(citiess_str1)
cityName = re.findall(cityName_re,citiess_str)
currentConfirmedCount_1 = re.findall(currentConfirmedCount_1_re,citiess_str)
confirmedCount_1 = re.findall(confirmedCount_1_re,citiess_str)
suspectedCount_1 = re.findall(suspectedCount_1_re,citiess_str)
curedCount_1 = re.findall(curedCount_1_re,citiess_str)
deadCount_1 = re.findall(deadCount_1_re,citiess_str)

# 省级数据转换为pandas数组
df = {
    #'地区代码':pd.Series(locationIds),
    '新增确诊人数':pd.Series(currentConfirmedCounts)[:]
    #'省区短名':pd.Series(provinceShortNames),
    #'当前确诊':pd.Series(currentConfirmedCounts),
    #'累计确诊':pd.Series(confirmedCounts),
    #'疑似确诊':pd.Series(suspectedCounts),
    #'治愈人数':pd.Series(curedCounts),
    #'死亡人数':pd.Series(deadCounts),
    #'评论':pd.Series(comments),
    #'统计数据区':pd.Series(statisticsDatas),
}
de= pd.Series(currentConfirmedCount_1)[:]

pds = pd.DataFrame(de)


print(de)

if __name__=="__main__":
    yq=
    '''




# NOTE(review): dead code — Django DB helper experiment (references
# undefined names `de` and `AllInfo`; would not run even if uncommented).
'''from django.db import connection
from django.db import transaction
from django.http import HttpResponse

def db_conn(requests):
    transaction.set_autocommit(0)
    cursor = connection.cursor()
    # cursor.execute('Create table Test(ID number, Name Text(30), Age number)')
    cursor.execute('Insert into echarts_allinfo(confirm) values(de)')
    cursor.execute('commit')
    confirm = [row[0] for row in cursor.fetchall()]
    connection.close
    transaction.commit()
    return HttpResponse(confirm)
#pds.to_excel('国内疫情统计表1.xlsx',index=True)
b=AllInfo()
b.confirm='33'
b.save()'''




# NOTE(review): dead code — international per-country statistics scraper
# (parses the getListByCountryTypeService2true script tag as JSON).
'''import json
import requests
import pandas as pd
from bs4 import BeautifulSoup
url = 'https://ncov.dxy.cn/ncovh5/view/pneumonia'
headers = {
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36'
    }
response = requests.get(url,headers = headers)
response.encoding = 'utf-8'
content = response.content.decode('utf-8')#以字节流形式打印网页源码
soup = BeautifulSoup(response.text,'lxml')
#爬取选择网页文档的内容
data = soup.find_all(name = 'script',attrs = {'id':'getListByCountryTypeService2true'})
#转为字符串
account = str(data)
account1 = account[95:-21]#切片截取从52到后面倒数21取到需要的数据
account1_json = json.loads(account1)


#提取数据到列表
id = []
continents = []
provinceName = []
currentConfirmedCount = []
confirmedCount = []
confirmedCountRank = []
suspectedCount = []
curedCount = []
deadCount = []
deadCountRank = []
deadRate = []
deadRateRank = []
print(len(account1_json))
i=0
for a in account1_json:
    if 'id' in a:
        id.append(a['id'])
    else:
        id.append('没有')
    continents.append(a['continents'])
    provinceName.append(a['provinceName'])
    currentConfirmedCount.append(a['currentConfirmedCount'])
    confirmedCount.append(a['confirmedCount'])
    if 'confirmedCountRank' in a:
        confirmedCountRank.append(a['confirmedCountRank'])
    else:
        confirmedCountRank.append('没有')
    suspectedCount.append(a['suspectedCount'])
    curedCount.append(a['curedCount'])
    deadCount.append(a['deadCount'])
    if 'deadCountRank' in a:
        deadCountRank.append(a['deadCountRank'])
    else:
        deadCountRank.append('没有')
    if 'deadRate' in a:
        deadRate.append(a['deadRate'])
    else:
        deadRate.append('没有')
    if 'deadRateRank' in a:
        deadRateRank.append(a['deadRateRank'])
    else:
        deadRateRank.append('没有')

#转换成pandas数组
df = {
    'id':pd.Series(id),
    '所在大洲':pd.Series(continents),
    '城市':pd.Series(provinceName),
    '当前确诊':pd.Series(currentConfirmedCount),
    '累计确诊':pd.Series(confirmedCount),
    '确诊排名':pd.Series(confirmedCountRank),
    '疑似病例':pd.Series(suspectedCount),
    '治愈人数':pd.Series(curedCount),
    '死亡人数':pd.Series(deadCount),
    '死亡人数排名':pd.Series(deadCountRank),
    '死亡率':pd.Series(deadRate),
    '死亡率排名':pd.Series(deadRateRank)
}
pds = pd.DataFrame(df)
print(pds)
#pds.to_excel('1.xlsx', index=False)
'''
# NOTE(review): dead code — alternative scraper targeting a 163.com page
# with sqlite3 storage; duplicates the live YQ class name above.
'''import requests
import sqlite3
from bs4 import  BeautifulSoup
import re
import json
class YQ():
    def __init__(self):
        self.url="https://wp.m.163.com/163/page/news/virus_report/index.html"
        self.header={'user-agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36'}
        self.db=sqlite3.connect(database='db')
    def GetData(self):
        req=requests.get(self.url,headers=self.header)
        req.encoding = 'utf-8'
        soup=BeautifulSoup(req.text)
        cover_input=soup.find('div',attrs={'class':'cover_input'}).text
        temp=re.findall(r"确诊(.+?)",cover_input)
        #data_json=json.loads(req.text)
        print(temp)
        #data=data_json['data']
        #data_province=data['areaTree'][2]['children']
        #info=pd.DataFrame(data_province)[['name']]
        #print(info)
        #data_total=data['chinaTotal']
        #for i in range(len(data_province)):
         #   print(data_province[i]['name'],data_province[i]['lastUpdateTime'])


    def SaveData(self):
        pass
if __name__=="__main__":
    yq=YQ()
    yq.GetData()
'''