import time
import requests
import json
from tqdm import tqdm
import streamlit as st
from importlib import import_module
import torch
import matplotlib.pyplot as plt
import pandas as pd
from train_eval import evaluate
from utils import build_dataset, build_iterator
import numpy as np

# Matplotlib setup: SimHei supplies a CJK font so Chinese category labels
# render in the pie chart; unicode_minus=False avoids broken minus glyphs
# when a CJK font is active.
plt.rcParams['font.sans-serif'] = ["SimHei"]
plt.rcParams["axes.unicode_minus"] = False
plt.style.use('ggplot')

# Request headers sent to the Tencent news API in crawl().
# NOTE(review): the cookie below is a hard-coded personal session token —
# it may expire and should not be committed to source control; consider
# loading it from an environment variable or config file.
headers = {
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 '
                  'Safari/537.36',
    'cookie': 'RK=XSvkyhZFFB; ptcz=c0bca4552a9022e55607e53302bc1d9221eab04c3f8b2deae7a3b6c472356f3c; '
              'pgv_pvid=3705911854; o_cookie=465296275; pac_uid=1_465296275; _ga=GA1.1.1538594322.1669381469; '
              '_ga_0EKMG65RQ9=GS1.1.1669381469.1.1.1669382717.0.0.0; Qs_lvt_323937=1666694271,1671783542,1671797488,'
              '1671798089; Qs_pv_323937=4469447608719551000,2748670330325534000,4047638602340355000,'
              '3544655403446810000; luin=o0465296275; '
              'lskey=000100008a3a034a65e85964b12c68ecca58b4647884396ca29b0a16097eaeeb7b8cf65946f7094e80b26595; iip=0; '
              'pgv_info=ssid=s6845133176; uin=o0465296275; skey=@4EUGQmG3V; ariaDefaultTheme=default; ariaFixed=true; '
              'ariaReadtype=1; ariaoldFixedStatus=false; ariaStatus=false '
}
# Maps the model's integer class index to its Chinese category name
# (education, tech, real estate, sports, entertainment, games, stocks,
# world, finance). Used to render predictions in the UI.
cate_dic = {
    3: '教育',
    4: '科技',
    1: '房产',
    6: '体育',
    8: '娱乐',
    7: '游戏',
    2: '股票',
    5: '国际',
    0: '财经'
}


def uni_evaluate(model, uni_iter):
    """Run inference over every batch in *uni_iter* and return predictions.

    Args:
        model: a torch module; called as ``model(texts)`` and expected to
            return per-class scores of shape (batch, num_classes).
        uni_iter: iterable yielding ``(texts, labels)`` batches; labels are
            ignored (this path is used for unlabeled input).

    Returns:
        1-D numpy integer array of argmax class indices, one per sample,
        in iteration order.
    """
    model.eval()
    batch_preds = []
    with torch.no_grad():
        for texts, _labels in uni_iter:
            outputs = model(texts)
            # argmax over the class dimension
            batch_preds.append(torch.max(outputs.data, 1)[1].cpu().numpy())
    # Concatenate once at the end: np.append inside the loop reallocated
    # and copied the whole array every batch (accidental O(n^2)).
    if batch_preds:
        return np.concatenate(batch_preds)
    return np.array([], dtype=int)


def predict(flag, many=True):
    """Load the trained model and predict labels for the prepared dataset.

    Args:
        flag: an imported model module (e.g. ``models.bert``) exposing
            ``Config`` and ``Model`` classes.
        many: if True, classify the crawled batch file via ``evaluate``;
            if False, classify the single user-entered headline via
            ``uni_evaluate``.

    Returns:
        Prediction result from ``evaluate`` (many=True) or the numpy array
        of class indices from ``uni_evaluate`` (many=False).
    """
    config = flag.Config('THUCNews')
    # flag=False: build_dataset returns the prediction datasets
    # (batch file and single-entry file) rather than train/dev/test.
    prob_data, uni_data = build_dataset(config, flag=False)

    model = flag.Model(config).to(config.device)
    # map_location lets a checkpoint saved on GPU load on a CPU-only host
    # (torch.load would otherwise fail trying to deserialize CUDA tensors).
    model.load_state_dict(torch.load(config.save_path, map_location=config.device))
    model.eval()

    if many:
        prob_iter = build_iterator(prob_data, config)
        pred = evaluate(config, model, prob_iter)
    else:
        uni_iter = build_iterator(uni_data, config)
        pred = uni_evaluate(model, uni_iter)
    return pred


def crawl():
    """Crawl fresh news headlines and append them to THUCNews/data/prob.txt.

    Fetches 8 categories from the Tencent news API plus stock news (label 2)
    from the mxnzp API. Each line written is ``<title>\\t<label>``; the label
    is the category's class index from cate_dic. Updates a Streamlit progress
    bar as it goes (10% per Tencent category, then jumps to 100%).
    """
    # Tencent API sub-channel name -> class index in cate_dic.
    # Stock news (index 2) has no Tencent channel and is fetched separately.
    crawl_dic = {
        'edu': 3,
        'tech': 4,
        'house': 1,
        'sports': 6,
        'ent': 8,
        'games': 7,
        'world': 5,
        'finance': 0
    }
    progre = 0
    my_bar = st.progress(0)
    for cate in crawl_dic:
        print(cate)
        titles = []
        # Pages of 20 items at offsets 0, 20, 40 (up to 60 titles/category).
        for offset in tqdm(range(0, 50, 20)):
            time.sleep(.8)  # throttle to be polite to the API
            resp = requests.get(
                'https://i.news.qq.com/trpc.qqnews_web.kv_srv.kv_srv_http_proxy/list?sub_srv_id={}&srv_id=pc&offset={}&limit=20&strategy=1'.format(
                    cate, offset) + '&ext={%22pool%22:[%22top%22,%22hot%22],%22is_filter%22:7,%22check_type%22:true}',
                headers=headers, timeout=10)  # timeout: never hang the UI on a dead API
            items = resp.json()["data"]["list"]
            titles.extend(item["title"] for item in items)
        with open('THUCNews/data/prob.txt', mode='a', encoding='utf-8') as f:
            for title in titles:
                f.write('{}\t{}\n'.format(title, crawl_dic[cate]))
        progre += 10
        my_bar.progress(progre)
    # Stock headlines come from a different API (5 pages of results).
    titles = []
    for page in tqdm(range(1, 6)):
        time.sleep(.8)
        resp = requests.get(
            'https://www.mxnzp.com/api/news/list?typeId=514&page={}&app_id=emhsknemgdp9msob&app_secret=Wmx6dU10d1VKM2hkd05oUVdWSXBxUT09'.format(
                page), timeout=10)
        items = resp.json()["data"]
        titles.extend(item["title"] for item in items)
    with open('THUCNews/data/prob.txt', mode='a', encoding='utf-8') as f:
        for title in titles:
            f.write('{}\t{}\n'.format(title, 2))
    my_bar.progress(100)


# --- Streamlit UI: classify a single user-entered headline ---
text_input = st.text_input(
    "Enter One New 👇",
    ""
)

# Model module providing Config/Model; also reused by the crawl branch below.
x = import_module('models.bert')

if text_input:
    # Write the headline with dummy label 9 so build_dataset's
    # "<text>\t<label>" parser accepts it; the label itself is ignored.
    # (Removed a redundant f.close() — the with-block already closed f.)
    with open('THUCNews/data/uni.txt', mode='w', encoding='utf-8') as f:
        f.write('{}\t{}\n'.format(text_input, 9))
    label = predict(x, many=False)
    st.header(cate_dic[label[0]])

# --- Streamlit UI: crawl live headlines, classify, and chart them ---
button = st.button(
    "实时爬取"
)
if button:
    with st.spinner("数据爬取中..."):
        # Empty the previous crawl output. 'w' mode truncates and the
        # with-block closes the handle (the old open("r+") was never
        # closed — a file-handle leak — and failed if the file was missing).
        with open('THUCNews/data/prob.txt', 'w', encoding='utf-8'):
            pass
        crawl()
        label = predict(x)
        df = pd.read_table('THUCNews/data/prob.txt', header=None)
        # Replace the crawl-time labels with the model's predictions,
        # mapped to human-readable category names.
        df[1] = [cate_dic[_] for _ in label]
        df.columns = ["news", 'label']
        st.dataframe(df, use_container_width=True)
        df.to_excel('result.xlsx')
        # Donut chart of the predicted category distribution
        # (value_counts computed once instead of twice).
        counts = df["label"].value_counts()
        fig, ax = plt.subplots()
        ax.pie(x=counts.values, labels=counts.index, autopct='%0.1f%%',
               wedgeprops=dict(width=0.5, edgecolor='w'))
        st.pyplot(fig)
