import requests
import json
import pandas as pd
from bs4 import BeautifulSoup
import datetime
import time
import re
from lib.db import insertDb,getOne,updateDb

# Replace it with the path of the folder that contains this project.
# path = './'
# Scraper entry point: parse the Zhihu epidemic page and write results into the MySQL database.
def splider():
    """Scrape Zhihu's COVID-19 special page and persist results to MySQL.

    Extracts the JSON payloads embedded in the page HTML and upserts:
      * related articles                -> table ``article``
      * daily new / imported cases      -> table ``add_trend``
      * national cumulative totals      -> table ``total``
      * per-province confirmed figures  -> table ``diagnosis_province``
    Rows are matched by title / date / province; existing rows are updated
    instead of re-inserted.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.79 Safari/537.36'
    }
    # Page scraped: https://www.zhihu.com/special/19681091/trends#map
    response = requests.get('https://www.zhihu.com/special/19681091/trends#map', headers=headers)
    soup = BeautifulSoup(response.content.decode("utf-8"), "lxml")
    response.encoding = 'utf-8'
    text = response.text

    # Gather every "relatedContents" JSON array embedded in the page.
    articleArr = []
    result = get_array3('"relatedContents":', text)
    articleArr.extend(result['data'])

    for _ in range(1, 10):
        # Bug fix: the original unconditionally made 9 extra calls; once the
        # marker was exhausted, str.find() returned -1 inside get_array4 and
        # garbage was parsed. Stop as soon as no further occurrence exists.
        if text.find('"relatedContents":', result['start'] + 1) == -1:
            break
        result = get_array4('"relatedContents":', text, result['start'])
        articleArr.extend(result['data'])

    # Insert only articles whose title is not already in the DB.
    for article in articleArr:
        check = getOne('article', 'title', article['title'])
        if check == 0:
            article['addtime'] = time.time()
            insertDb(article, 'article')

    # NOTE(review): the original also extracted an unused `timestamp` via
    # get_string('"deadline":"', text); it was dead code and has been dropped.

    # The css-* class names below are tied to Zhihu's generated stylesheet and
    # will break silently when the page is redeployed — TODO confirm they are
    # still current.
    todayAdd = soup.find('div', {'class': {'css-ie7fjl'}}).text    # new confirmed today
    total = soup.findAll('div', {'class': {'css-1rwr5e8'}})
    zhiyu = soup.find('div', {'class': {'css-16flxej'}}).text      # nationwide cured
    deadnum = soup.find('div', {'class': {'css-jy6bky'}}).text     # cumulative deaths
    input_add = soup.find('div', {'class': {'css-xcospu'}}).text   # new imported cases
    input_add = input_add.split('+')[1]                            # strip the leading '+'
    grand_total = total[1].text   # nationwide cumulative confirmed
    grand_input = total[0].text   # cumulative imported cases

    # Date key like '2020-3-5' (no zero padding, matching existing rows).
    now = datetime.datetime.now()
    date = str(now.year) + '-' + str(now.month) + '-' + str(now.day)

    # Upsert today's row in the daily-trend table.
    data = {
        'differ_diagnosis': todayAdd,
        'input': input_add,
        'date': date,
        'addtime': time.time()
    }
    check = getOne('add_trend', 'date', date)
    if check == 0:
        insertDb(data, 'add_trend')
    else:
        updateDb(data, 'add_trend', check)

    # Upsert today's row in the national-totals table.
    data = {
        'total': grand_total,
        'input': grand_input,
        'zhiyu': zhiyu,
        'dead': deadnum,
        'date': date,
        'addtime': time.time()
    }
    check = getOne('total', 'date', date)
    if check == 0:
        insertDb(data, 'total')
    else:
        updateDb(data, 'total', check)

    provinces = get_array('"domesticList":', text)
    getData()  # also refresh the "currently confirmed" column from Sogou

    # Map long official names to the short labels the map component expects.
    # (Hoisted out of the loop: it is loop-invariant.)
    rename = {
        '中国香港': '香港',
        '黑龙江省': '黑龙江',
        '广西壮族自治区': '广西',
        '新疆维吾尔自治区': '新疆',
        '内蒙古自治区': '内蒙古',
        '宁夏回族自治区': '宁夏',
        '中国台湾': '台湾',
        '西藏自治区': '西藏',
    }
    for i in provinces:
        if len(i['name']) == 3:
            i['name'] = i['name'][0:2]   # e.g. '湖北省' -> '湖北'
        i['name'] = rename.get(i['name'], i['name'])

        # Backtick-quoted keys because `add` is a MySQL reserved word —
        # presumably lib.db interpolates them verbatim; verify there.
        insertData = {
            'province': i['name'],
            '`nums`': int(i['conNum']),
            '`add`': int(i['addCon']),
            'dead': int(i['deathNum']),
            'zhiyu': int(i['cureNum']),
        }
        check = getOne('diagnosis_province', 'province', i['name'])
        if check == 0:
            insertData['addtime'] = int(time.time())
            print(insertData)
            insertDb(insertData, 'diagnosis_province')
        else:
            insertData['mdtime'] = int(time.time())
            updateDb(insertData, 'diagnosis_province', check)

def getData():
    """Scrape Sogou's epidemic page for per-province "currently confirmed"
    counts and upsert them into ``diagnosis_province`` (column ``xc``).

    The figures live in a JSON array following the ``"area":`` key inside the
    rendered HTML; the array is recovered by bracket matching, not by a full
    JSON parse of the page.
    """
    url = "http://sa.sogou.com/new-weball/page/sgs/epidemic?type_page=VR"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.79 Safari/537.36'
    }
    response = requests.get(url, headers=headers)
    soup = BeautifulSoup(response.content.decode("utf-8"), "lxml")

    text = str(soup)
    # Locate the balanced [...] span right after the "area": key.
    start = text.index('"area":') + len('"area":')
    depth = 1  # text[start] is assumed to be the opening '['
    data = []
    for i in range(start + 1, len(text)):
        if text[i] == '[':
            depth += 1
        elif text[i] == ']':
            depth -= 1
        if depth == 0:
            data = json.loads(text[start:i + 1])
            break  # bug fix: the original kept scanning the rest of the page

    for i in data:
        # Normalize Macao's name to the label used by the map component.
        if i['provinceName'] == '澳门':
            i['provinceName'] = '中国澳门'
        insertData = {
            'province': i['provinceName'],
            'xc': int(i['currentConfirmedCount']),  # currently confirmed
        }
        check = getOne('diagnosis_province', 'province', i['provinceName'])
        if check == 0:
            insertData['addtime'] = int(time.time())
            print(insertData)
            insertDb(insertData, 'diagnosis_province')
        else:
            insertData['mdtime'] = int(time.time())
            updateDb(insertData, 'diagnosis_province', check)

def get_string(signal, text):
    """Return the substring of *text* between *signal* and the next '"'.

    Raises ValueError when *signal* (or the closing quote) is absent.
    """
    begin = text.index(signal) + len(signal)
    close = text.index('"', begin)
    return text[begin:close]


def get_array(signal, text):
    """Decode the JSON array that immediately follows *signal* in *text*.

    Assumes the character right after *signal* is the opening '['; walks
    forward keeping a bracket-depth counter and json-decodes the balanced
    span. Returns None if the brackets never balance; raises ValueError if
    *signal* is absent.
    """
    begin = text.index(signal) + len(signal)
    depth = 1  # counts the opening '[' at text[begin]
    pos = begin + 1
    while pos < len(text):
        ch = text[pos]
        if ch == '[':
            depth += 1
        elif ch == ']':
            depth -= 1
        if depth == 0:
            return json.loads(text[begin:pos + 1])
        pos += 1
            
def get_array3(signal, text):
    """Like get_array, but also report where the array begins.

    Returns ``{'data': parsed_list, 'start': index_of_opening_bracket}`` so a
    follow-up call (get_array4) can resume the search past this occurrence.
    Returns None if the brackets never balance; raises ValueError if *signal*
    is absent.
    """
    begin = text.index(signal) + len(signal)
    depth = 1  # counts the opening '[' at text[begin]
    pos = begin + 1
    while pos < len(text):
        ch = text[pos]
        if ch == '[':
            depth += 1
        elif ch == ']':
            depth -= 1
        if depth == 0:
            return {'data': json.loads(text[begin:pos + 1]), 'start': begin}
        pos += 1

def get_array4(signal, text, start):
    """Find the next occurrence of *signal* after position *start* and decode
    the JSON array that follows it.

    Returns ``{'data': parsed_list, 'start': index_of_opening_bracket}``, or
    None when *signal* does not occur again. (Bug fix: the original never
    checked str.find's -1 result, so a missing marker silently shifted the
    parse start to ``len(signal) - 1`` and decoded garbage.)
    """
    pos = text.find(signal, int(start) + 1)
    if pos == -1:
        return None  # no further occurrence — caller must stop iterating
    begin = pos + len(signal)
    count = 1  # counts the opening '[' at text[begin]
    for i in range(begin + 1, len(text)):
        if text[i] == '[':
            count += 1
        elif text[i] == ']':
            count -= 1
        if count == 0:
            return {'data': json.loads(text[begin:i + 1]), 'start': begin}

def unravel(provinces):
    """Flatten province records into one list of their city dicts.

    Each city dict is tagged in place with a 'province_name' key holding its
    parent province's 'name' before being collected into the result.
    """
    flattened = []
    for entry in provinces:
        cities = entry['cities']
        for town in cities:
            town['province_name'] = entry['name']
        flattened.extend(cities)
    return flattened


if __name__ == '__main__':
    # splider()  # full Zhihu scrape disabled; only the Sogou refresh runs
    getData()
    
