import hashlib
import time
import pandas as pd
import pymysql as pymysql
import requests
from pyecharts.charts import Bar, Line, Pie, Map, Grid
from sqlalchemy import create_engine
from pyecharts import options as opts


def get_conn():
    """Open a connection to the local MySQL database `ssssss`.

    Returns:
        tuple: (connection, cursor).  The caller owns both and should
        release them via close_conn().
    """
    connection = pymysql.connect(
        host="127.0.0.1",
        user="root",
        password="123456",
        db="ssssss",
        charset="utf8",
    )
    return connection, connection.cursor()


def close_conn(conn, cursor):
    """Finish a unit of DB work: close the cursor, commit the pending
    transaction, then close the connection — in that order.
    """
    for finish in (cursor.close, conn.commit, conn.close):
        finish()


def query(sql, *args):
    """Execute *sql* with parameters *args* on a fresh connection.

    Opens a new connection per call, fetches every row, and always
    releases the connection (commit + close) even when execution fails.

    Args:
        sql: SQL statement, optionally with %s placeholders.
        *args: parameters bound to the placeholders.

    Returns:
        tuple: all rows returned by the statement (empty for DML).
    """
    conn, cursor = get_conn()
    # try/finally fixes a resource leak: the original left the
    # connection and cursor open if cursor.execute() raised.
    try:
        cursor.execute(sql, args)
        res = cursor.fetchall()
    finally:
        close_conn(conn, cursor)
    return res


def get_all_data():
    """Return nationwide totals as a list:
    [sum(nowConfirm), sum(dead), sum(heal), sum(wzz)] over table `yiqing`.
    """
    rows = query("select sum(nowConfirm),sum(dead), sum(heal),sum(wzz) from yiqing")
    return list(rows[0])


def get_all():
    """Fetch every row of the `yiqing` table (one row per province)."""
    return query('select * from yiqing')


def get_bar():
    """Dark-themed bar chart of deaths ("死亡") vs. recoveries ("治愈")
    for the first six provinces in `yiqing`.

    Returns:
        str: the pyecharts option JSON for embedding in a page.
    """
    frame = pd.DataFrame(get_all(),
                         columns=['id', 'province', 'nowConfirm', 'dead', 'heal', 'wzz'])
    bar = Bar(init_opts=opts.InitOpts(theme='dark'))
    bar.add_xaxis(list(frame['province'].values)[:6])
    bar.add_yaxis("死亡", frame['dead'].values.tolist()[:6])
    bar.add_yaxis("治愈", frame['heal'].values.tolist()[:6])
    bar.set_series_opts(label_opts=opts.LabelOpts(color="white"))
    bar.set_global_opts(
        title_opts=opts.TitleOpts(
            title="各地区确诊人数与死亡人数",
            title_textstyle_opts=opts.TextStyleOpts(color="white", font_size=13)),
        datazoom_opts=[opts.DataZoomOpts()],
        yaxis_opts=opts.AxisOpts(
            axislabel_opts=opts.LabelOpts(font_size=12, color="white"),
            axisline_opts=opts.AxisLineOpts(
                linestyle_opts=opts.LineStyleOpts(color="white"))),
        xaxis_opts=opts.AxisOpts(
            axislabel_opts=opts.LabelOpts(font_size=12, color="aqua"),
            axisline_opts=opts.AxisLineOpts(
                linestyle_opts=opts.LineStyleOpts(color="white"))),
        legend_opts=opts.LegendOpts(
            textstyle_opts=opts.TextStyleOpts(color="white")),
    )
    return bar.dump_options()


def insert_hainanzhudao():
    """Duplicate the 海南 (Hainan) row into a 南海诸岛 (South China Sea
    Islands) entry with id 35, so the map chart has data for that region.
    Uses the `hainan` table as scratch space, cleared afterwards.
    """
    statements = (
        'insert into hainan select * from yiqing where province="海南"',
        'UPDATE hainan set province="南海诸岛" ,   id=35',
        'insert into yiqing select * from hainan',
        'delete from hainan',
    )
    for stmt in statements:
        query(stmt)


def get_map():
    """China map colored by each province's current confirmed count.

    Returns:
        str: the pyecharts option JSON for embedding in a page.
    """
    frame = pd.DataFrame(get_all(),
                         columns=['id', 'province', 'nowConfirm', 'dead', 'heal', 'wzz'])
    pairs = [list(i) for i in zip(frame['province'].values.tolist(),
                                  frame['nowConfirm'].values.tolist())]
    china_map = Map()
    china_map.add("现有确诊", pairs, "china")
    china_map.set_series_opts(label_opts=opts.LabelOpts(color="white"))
    china_map.set_global_opts(
        title_opts=opts.TitleOpts(
            title="各地区确诊人数",
            title_textstyle_opts=opts.TextStyleOpts(color="white", font_size=20)),
        visualmap_opts=opts.VisualMapOpts(
            max_=600, textstyle_opts=opts.TextStyleOpts(color="white")),
        legend_opts=opts.LegendOpts(
            textstyle_opts=opts.TextStyleOpts(color="white", font_size=15)),
    )
    return china_map.dump_options()


def get_line():
    """Line chart comparing recoveries ("治愈") and deaths ("死亡") for
    the first twelve provinces in `yiqing`.

    Returns:
        str: the pyecharts option JSON for embedding in a page.
    """
    frame = pd.DataFrame(get_all(),
                         columns=['id', 'province', 'nowConfirm', 'dead', 'heal', 'wzz'])
    line = Line()
    line.add_xaxis(list(frame['province'].values)[:12])
    line.add_yaxis("治愈", frame['heal'].values.tolist()[:12])
    line.add_yaxis("死亡", frame['dead'].values.tolist()[:12],
                   linestyle_opts=opts.LineStyleOpts(color="gold"))
    line.set_series_opts(label_opts=opts.LabelOpts(color="white"))
    line.set_global_opts(
        title_opts=opts.TitleOpts(
            title="死亡与治愈",
            title_textstyle_opts=opts.TextStyleOpts(color="white")),
        yaxis_opts=opts.AxisOpts(
            axislabel_opts=opts.LabelOpts(font_size=12, color="white"),
            axisline_opts=opts.AxisLineOpts(
                linestyle_opts=opts.LineStyleOpts(color="white"))),
        xaxis_opts=opts.AxisOpts(
            axislabel_opts=opts.LabelOpts(font_size=12, color="aqua"),
            axisline_opts=opts.AxisLineOpts(
                linestyle_opts=opts.LineStyleOpts(color="white"))),
        legend_opts=opts.LegendOpts(
            textstyle_opts=opts.TextStyleOpts(color="white")),
    )
    return line.dump_options()


def get_pie():
    """Donut chart of current confirmed counts for the first eight
    provinces in `yiqing`, shown in descending order.

    Returns:
        str: the pyecharts option JSON for embedding in a page.
    """
    data = get_all()
    df = pd.DataFrame(data, columns=['id', 'province', 'nowConfirm', 'dead', 'heal', 'wzz'])[:8]
    # BUG FIX: sort_values is not in-place — the original discarded the
    # sorted frame, so the chart showed the rows in table order.
    # (Note: sorting happens after the [:8] slice, matching the original
    # slice-then-sort intent; change order here if "top 8 overall" is wanted.)
    df = df.sort_values(by=['nowConfirm'], ascending=False)
    pie = (
        Pie()
            .add(
            "",
            [list(i) for i in zip(df['province'].values.tolist(), df['nowConfirm'].values.tolist())],
            radius=["10%", "30%"]
        )
            .set_global_opts(
            legend_opts=opts.LegendOpts(orient="vertical", pos_top="70%", pos_left="70%",
                                        textstyle_opts=opts.TextStyleOpts(color="white"))
        )
            .set_series_opts(label_opts=opts.LabelOpts(formatter="{b}: {c}", color="white"))
    )

    return pie.dump_options()


def insert_data(datafile1, datafile2, datafile3, datafile4):
    """Load four CSV files into MySQL, replacing any existing tables.

    Mapping: datafile1 -> yiqing, datafile2 -> higharea,
    datafile3 -> middlearea, datafile4 -> count.
    """
    # SQLAlchemy engine backed by pymysql (db `ssssss` on localhost:3306).
    engine = create_engine('mysql+pymysql://root:123456@localhost:3306/ssssss')
    for path, table in ((datafile1, 'yiqing'),
                        (datafile2, 'higharea'),
                        (datafile3, 'middlearea')):
        pd.read_csv(path).to_sql(table, engine, index=False, if_exists='replace')
    # `count` keeps its index column (original behavior — presumably
    # intentional; verify against readers of that table).
    pd.read_csv(datafile4).to_sql('count', engine, if_exists='replace')


def gettime():
    """Current local time as 'YYYY年MM月DD日 HH:MM:SS' (time part per
    the locale's %X format).
    """
    return time.strftime("%Y年%m月%d日 %X")


def yiqing_area():
    """Query the government risk-area API and dump the results to CSV.

    Posts a signed JSON request to the zwfw interface, then writes:
      - highlist.csv   one row per high-risk community
      - middlelist.csv one row per medium-risk community
      - count.csv      a single row with the high/medium area counts
    """

    def flatten(areas, utime, label):
        # One output row per community:
        # [update_time, province, city, county, address, risk label].
        rows = []
        for area in areas:
            for community in area['communitys']:
                rows.append([utime, area['province'], area['city'],
                             area['county'], community, label])
        return rows

    url_ = "http://103.66.32.242:8005/zwfwMovePortal/interface/interfaceJson"
    timestamp_ = str(time.time())[:10]

    # Body signature: sha256(ts + app secret + nonce + ts), upper-case hex.
    no256sign = str(timestamp_) + "23y0ufFl5YxIyGrI8hWRUZmKkvtSjLQA" + "123456789abcdefg" + str(timestamp_)
    signature = hashlib.sha256(no256sign.encode('utf-8')).hexdigest().upper()

    data_ = {"appId": "NcApplication",
             "key": "3C502C97ABDA40D0A60FBEE50FAAD1DA",
             "nonceHeader": "123456789abcdefg",
             "paasHeader": "zdww",
             "signatureHeader": signature,
             "timestampHeader": str(timestamp_)}

    # Header signature: same sha256 scheme with a second shared secret.
    no256smt_sig = str(timestamp_) + "fTN2pfuisxTavbTuYVSsNJHetwq5bJvCQkjjtiLM2dCratiA" + str(timestamp_)
    smt_sig = hashlib.sha256(no256smt_sig.encode('utf-8')).hexdigest().upper()

    headers_ = {'x-wif-nonce': 'QkjjtiLM2dCratiA',
                'x-wif-paasid': 'smt-application',
                'x-wif-signature': smt_sig,
                'x-wif-timestamp': str(timestamp_),
                'Content-Type': "application/json; charset=UTF-8",
                }
    getedthings = requests.post(url_, json=data_, headers=headers_).json()

    utime = getedthings['data']['end_update_time']  # last update time
    hcount = getedthings['data'].get('hcount', 0)   # number of high-risk areas
    mcount = getedthings['data'].get('mcount', 0)   # number of medium-risk areas

    # Originally two copy-pasted loops (which also shadowed the builtin
    # `type`); both now go through the flatten() helper.
    risk_h = flatten(getedthings['data']['highlist'], utime, "高风险")
    risk_m = flatten(getedthings['data']['middlelist'], utime, "中风险")

    cols = ['end_update_time', 'province', 'city', 'county', 'address', 'type']
    pd.DataFrame(risk_h, columns=cols).to_csv('highlist.csv', index=False)
    pd.DataFrame(risk_m, columns=cols).to_csv('middlelist.csv', index=False)
    pd.DataFrame([[hcount, mcount]],
                 columns=['高风险地区个数', '中风险地区个数']).to_csv('count.csv', index=False)


def get_r2_data():
    """Return the 18 most recently updated risk-area rows, combining the
    high- and medium-risk tables.
    """
    return query("select * from higharea UNION SELECT * from middlearea ORDER BY end_update_time DESC LIMIT 18")


def get_count():
    """Fetch the high/medium risk-area counts from the `count` table."""
    return query('select * from count ')


def paqu_data():
    """Scrape per-province epidemic totals from the Tencent news API and
    save them to yiqing.csv (columns: id, province, nowConfirm, dead,
    heal, wzz).
    """
    url = 'https://api.inews.qq.com/newsqa/v1/query/inner/publish/modules/list?modules=statisGradeCityDetail,' \
          'diseaseh5Shelf '
    json_data = requests.get(url, verify=True).json()['data']
    # First areaTree node is China; its children are the provinces.
    provinces = json_data['diseaseh5Shelf']['areaTree'][0]['children']
    records = [
        {'id': idx,
         'province': node['name'],
         'nowConfirm': node['total']['nowConfirm'],
         'dead': node['total']['dead'],
         'heal': node['total']['heal'],
         'wzz': node['total']['wzz']}
        for idx, node in enumerate(provinces, start=1)
    ]
    pd.DataFrame(records).to_csv('yiqing.csv', index=False)
