# -*- coding: utf-8 -*-

import webview
import json
import os
import requests
from bs4 import BeautifulSoup
import time
import webbrowser
import easygui

# Connectivity check: bail out early if the miHoYo static API is unreachable,
# since every feature of this app depends on it.
try:
    requests.get("https://api-takumi-static.mihoyo.com", timeout=5)
except requests.RequestException:
    # Narrowed from a bare except: only network-level failures should abort;
    # a bare except would also swallow KeyboardInterrupt/SystemExit.
    easygui.msgbox(msg="无法连接到服务器\n请确保网络可用后再试", title="错误")
    os._exit(0)

def read_json(file):
    """Load and return the JSON document stored at *file*.

    The path is opened as UTF-8 explicitly so behavior does not depend on
    the platform's locale encoding.
    """
    # The context manager closes the handle; the original's explicit
    # f.close() inside the with-block was redundant.
    with open(file, "r", encoding="utf-8") as f:
        return json.load(f)
def dump_json(file, data):
    """Serialize *data* as JSON to *file*, overwriting any existing content.

    Opened as UTF-8 explicitly to mirror read_json; json.dump's default
    ensure_ascii=True keeps the output ASCII-safe either way.
    """
    # The context manager closes the handle; no explicit close needed.
    with open(file, "w", encoding="utf-8") as f:
        json.dump(data, f)

def get_Web_inf(part, page):
    """Fetch one page of news listings for *part* ("sr", "ys" or "zzz").

    Returns a list of dicts with keys "id", "banner", "title", "time".
    The banner URL is extracted from the entry's "sExt" JSON payload; when
    extraction fails, a per-game fallback value is used instead (matching
    the original per-branch behavior).
    """
    data_url = {
        "sr": "https://api-takumi-static.mihoyo.com/content_v2_user/app/1963de8dc19e461c/getContentList?iPage=<!page!>&iPageSize=5&sLangKey=zh-cn&isPreview=0&iChanId=255",
        "ys": "https://api-takumi-static.mihoyo.com/content_v2_user/app/16471662a82d418a/getContentList?iAppId=43&iChanId=719&iPageSize=5&iPage=<!page!>&sLangKey=zh-cn",
        "zzz": "https://api-takumi-static.mihoyo.com/content_v2_user/app/706fd13a87294881/getContentList?iPageSize=9&iPage=<!page!>&sLangKey=zh-cn&iChanId=273"
    }
    # Banner used when the extraction below fails, per game.
    fallback_banner = {
        "sr": "sr_null",
        "ys": "https://ys.mihoyo.com/main/_nuxt/img/holder.37207c1.jpg",
        "zzz": "https://ys.mihoyo.com/main/_nuxt/img/holder.37207c1.jpg",
    }
    # timeout added so a stalled connection cannot hang the UI forever.
    r = requests.get(data_url[part].replace("<!page!>", str(page)), timeout=10)
    this_page_data = r.json()["data"]["list"]

    result_data = []
    for d in this_page_data:
        content_id = d["iInfoId"]
        try:
            ext = json.loads(d["sExt"])
            if part == "sr":
                banner = ext["news-poster"][0]["url"]
            elif part == "ys":
                banner = ext[d["sChanId"][0] + "_1"][0]["url"]
            else:  # "zzz"
                banner = ext["news-banner"][0]["url"]
        except Exception as e:
            # Missing/malformed ext data is expected for some entries;
            # fall back. Diagnostics preserved from the original branches.
            if part == "zzz":
                print(e)
            if part != "sr":
                print(str(content_id) + " no_banner")
            banner = fallback_banner[part]
        result_data.append({
            "id": content_id,
            "banner": banner,
            "title": d["sTitle"],
            "time": d["dtStartTime"],
        })
    return result_data

# Maps each section key to the filename stem of its local JSON archive
# (the "finder" entry appears unused in this file — TODO confirm).
dMatch = {
    "ys":"YS_allData",
    "sr":"sr_allData",
    "zzz":"zzz_allData",
    "finder":"mhy_finder"
}
# Sections whose archives are fetched and kept in sync on disk.
part_list = ["ys","sr","zzz"]

def up_data():
    """Incrementally sync each local JSON archive with the web listings.

    Walks listing pages until either 10 already-known entries have been
    seen or an empty page is returned, then persists the archive.
    """
    for p in part_list:
        archive = read_json(dMatch[p] + ".json")
        page = 1
        known_count = 0
        while True:
            batch = get_Web_inf(p, page)
            for item in batch:
                key = str(item["id"])
                if key in archive:
                    known_count += 1
                    continue
                # Bug fix: the original inserted under the int id while the
                # membership test above uses str(id) (JSON keys are always
                # strings), so an item fetched twice in one run was never
                # counted as known. Storing under str(id) keeps in-memory
                # keys consistent with the persisted JSON.
                archive[key] = item
            if known_count >= 10:
                break
            if not batch:
                break
            page += 1
        dump_json(dMatch[p] + ".json", archive)

class api():
    """Methods exposed to the front-end JavaScript via pywebview's js_api."""

    def get_news(self, part):
        """Fetch the two newest listing pages for *part* ("ys"/"sr"/"zzz")."""
        new_datas = []
        for page in range(1, 3):  # pages are 1-based on the miHoYo API
            new_datas.extend(get_Web_inf(part, page))
        return {"success": True, "data": new_datas}

    def search(self, p, kw):
        """Return archive entries whose title contains *kw*, newest first.

        Entries are ordered by numeric id descending — the same order the
        original produced with its O(n^2) repeated max()/del loop.
        """
        archive = read_json(dMatch[p] + ".json")
        matches = [entry for entry in archive.values() if kw in entry["title"]]
        # Single O(n log n) sort replaces the repeated-max scan; it also
        # avoids the original's KeyError when ids round-trip as strings
        # (int(id) was looked up against a possibly-str dict key).
        matches.sort(key=lambda entry: int(entry["id"]), reverse=True)
        return {"success": True, "data": matches}

    def run_finder(self, p, aid):
        """Render article *aid* in a hidden window and scrape its media URLs.

        Polls the rendered DOM until the article container appears, then
        collects img/video src attributes.
        """
        a_url = {
            "ys": "https://ys.mihoyo.com/main/news/detail/<!aid!>",
            "sr": "https://sr.mihoyo.com/news/<!aid!>",
            "zzz": "https://zzz.mihoyo.com/news/<!aid!>"
        }
        class_match = {
            "ys": ".article__content",
            "sr": ".main-wrap",
            "zzz": ".news-detail-article"
        }
        finder = webview.create_window('finder', url=a_url[p].replace("<!aid!>", aid))
        finder.hide()
        while True:
            try:
                page_source = finder.evaluate_js("document.documentElement.outerHTML")
                soup = BeautifulSoup(page_source, "html.parser")
                article = soup.select(class_match[p])[0]
                img_list = [tag.get("src") for tag in article.select("img")]
                vid_list = [tag.get("src") for tag in article.select("video")]
            except Exception:
                # Page not finished rendering yet (evaluate_js failed or the
                # selector matched nothing) — brief sleep replaces the
                # original's 100%-CPU busy spin before retrying.
                time.sleep(0.2)
            else:
                result_list = {"img": img_list, "video": vid_list}
                finder.destroy()
                break
        return {"success": True, "data": result_list}

    def download(self, videoUrl, file_name, task_show):
        """Stream *videoUrl* to download/<file_name>, pushing progress to JS.

        Progress updates are throttled to at most one per second; *task_show*
        is the front-end task identifier passed back to updateProgress /
        finish_dlTask_show.
        """
        dl_r = requests.get(videoUrl, stream=True, timeout=30)
        total_size = int(dl_r.headers.get("content-length", 0))
        loaded = 0
        update_time = time.time()
        with open("download/" + file_name, "wb") as f:
            for data in dl_r.iter_content(chunk_size=1024):
                if not data:  # skip keep-alive chunks
                    continue
                f.write(data)
                loaded += len(data)
                if time.time() - update_time >= 1:
                    window.evaluate_js(f"updateProgress('{task_show}',{loaded},{total_size})")
                    update_time = time.time()
        window.evaluate_js(f"finish_dlTask_show('{task_show}')")

    def updateNews(self):
        """Refresh all local archives; returns "ok" when finished."""
        up_data()
        return "ok"

    def open_dlDir(self):
        """Open the download directory in Explorer (os.startfile is Windows-only)."""
        os.startfile("download")
        return "ok"

    def open_link(self, link):
        """Open *link* in the system default browser."""
        webbrowser.open(link)
# First-run bootstrap: make sure the download directory exists and seed any
# missing archive files from the mirror service.
if not os.path.exists("download"):
    os.makedirs("download")

for p in part_list:
    if not os.path.exists(dMatch[p] + ".json"):
        # timeout added so a hung mirror cannot block startup forever.
        d_r = requests.get(
            "https://codevicent.xyz/app/mhy_dl/api/get_ListData/?p=" + p,
            timeout=15,
        )
        print(p)
        r_d = d_r.json()
        # Reuse the module's own writer instead of re-inlining open/dump
        # (the original also left a redundant f.close() inside the with).
        dump_json(dMatch[p] + ".json", r_d["data"])

# Main application window. Must be module-global: api.download reaches it
# via the global name `window` to push progress updates into the page JS.
window = webview.create_window(
    "MHY新闻资源搜索器",
    "view.html",
    width=1200,
    height=600,
    text_select=False,
    confirm_close=True,
    js_api=api(),  # exposes the api() methods to the page's JavaScript
)
webview.start()  # blocks until the window is closed