# @Author: 唐奇才
# @Time: 2021/5/4 23:38
# @File: finallyCode.py
# @Software: PyCharm

import json
import threading
import time
from multiprocessing.dummy import Pool

import requests
from bs4 import BeautifulSoup
from pandas import DataFrame

import myUtils.myFakerHeaders as mfh
import myUtils.myJsonUtil as mju
from myUtils.mysqlUtils import updateData, getAllMysqlData



def getHTML(url):
    """Fetch *url* with a randomized User-Agent and return its HTML text.

    Returns the decoded page body on success, or ``None`` when the
    request fails (timeout, connection error, ...).
    """
    try:
        headers = {'User-Agent': mfh.getFakeHeaders()}
        # BUG FIX: the second positional argument of requests.get() is
        # `params`, not `headers` -- the fake User-Agent was being sent as
        # a query string instead of a request header.  Pass it by keyword.
        result = requests.get(url, headers=headers, timeout=10)
        # Force utf-8 decoding; the site serves utf-8 pages.
        result.encoding = "utf-8"
        return result.text
    except requests.RequestException:
        # Best-effort fetch: log and fall through to an explicit None so
        # callers can test the result instead of crashing here.
        print("error is raised")
        return None




def getWorksInfo(thread_i_di):
    """Scrape the rank entries at offsets thread_i_di, thread_i_di+10, ...

    Runs as one worker inside a 10-thread pool: each worker handles every
    10th <li class="rank-item"> of the ranking page and merges its rows
    into the shared module-level ``allData`` list (read by useThread()).
    """
    global count
    global allData
    # BUG FIX: every worker used to rebind `allData = []` and `count = 0`,
    # so concurrent threads clobbered each other's lists and only appends
    # made after the last rebinding survived.  Create the shared state
    # exactly once (dict.setdefault is atomic under the GIL) and guard the
    # merge with a lock instead.
    shared = globals()
    lock = shared.setdefault("_all_data_lock", threading.Lock())
    shared.setdefault("allData", [])
    shared.setdefault("count", 0)

    html = getHTML(homeUlr)
    bs = BeautifulSoup(html, 'html.parser')
    data = bs.findAll("li", {"class": "rank-item"})

    rows = []
    for i in range(len(data) // 10):
        pageNumber = i * 10 + thread_i_di
        if pageNumber >= len(data):
            continue
        item = data[pageNumber]
        oid = item['data-id']

        info = item.find("div", {'class': 'info'})
        videoUrl = 'https:' + info.a["href"]
        content = info.a.text
        # detail text holds play count and reply count separated by blanks.
        detail = str(info.find("div", {"class": "detail"}).text).strip().replace("\n", "").split(" ")
        detail = [d for d in detail if d != ""][:2]
        hotLevel = info.find("div", {"class": "pts"}).div.text
        # Per-video page: [fans, dmnum, like, coin, collect, share, tags, about]
        fans, dmnum, like, coin, collect, share, tags, about = getFansNum(videoUrl)
        rows.append([oid, hotLevel, detail[0], detail[1], videoUrl, content,
                     fans, dmnum, like, coin, collect, share, tags, about])

    # Publish this worker's rows atomically.
    with lock:
        allData.extend(rows)
        count += len(rows)


def useThread():
    """Scrape the Bilibili popularity ranking with 10 worker threads and
    export the collected rows to ./allHot.xlsx.

    Publishes the ranking URL through the module-level global ``homeUlr``
    (read by getWorksInfo) and reads the results back from ``allData``.
    """
    global homeUlr
    homeUlr = "https://www.bilibili.com/v/popular/rank/all"
    # Number of worker threads; each worker i handles entries i, i+10, ...
    thread = 10

    pool = Pool(thread)
    # Idiom fix: list(range(thread)) instead of a copying comprehension.
    pool.map(getWorksInfo, list(range(thread)))
    pool.close()
    pool.join()
    print("thread end")

    df = DataFrame(allData, columns=['oid', 'hotLevel', 'playnum', 'relies', 'videoUrl', 'content',
                                     'fans', "dmnum", "like", "coin", "collect", "share", "tags", "info"])
    # NOTE(review): pandas needs openpyxl installed to write .xlsx.
    df.to_excel("./allHot.xlsx")


def upTable():
    """Run one scrape-and-export cycle.

    The loop below sketches the originally planned periodic refresh that
    would also push each batch to MySQL via updateData(); it is kept,
    disabled, as a reference.
    """
    useThread()

    # Periodic-update sketch (disabled):
    #   cycle = 0
    #   while True:
    #       useThread()
    #       updateData(allData)
    #       cycle += 1
    #       print("update #{} done".format(cycle))
    #       time.sleep(10)


def getFansNum(url):
    """Scrape a video detail page for per-video statistics.

    Returns an 8-element list
    ``[fans, dmnum, like, coin, collect, share, tags, info]``;
    any field that could not be scraped is ``None``.
    """
    # BUG FIX: start every field as None so the except-path can still
    # return a well-formed row.  Previously the except branch referenced
    # names that were unbound when parsing failed early (e.g. getHTML
    # returned None), raising NameError instead of degrading gracefully.
    fans = dmnum = like = coin = collect = share = tags = info = None
    try:
        html = getHTML(url)
        bs1 = BeautifulSoup(html, "html.parser")

        # BUG FIX: {"class", "ops"} was a set literal; the attrs filter
        # must be a dict mapping attribute -> value.
        ops = bs1.find("div", {"class": "ops"}).findAll("span")
        dm = bs1.find("span", {"class": "dm"})

        # Tag labels, joined into a single comma-separated string.
        tcs = bs1.find("ul", {"class": "tag-area clearfix"}).find_all("li")
        tags = ",".join(tc.text.strip() for tc in tcs)

        dmnum = dm.text[:-2]          # drop the trailing "弹幕" suffix
        like = ops[0].text.strip()
        coin = ops[1].text.strip()
        collect = ops[2].text.strip()
        share = ops[3].text.strip()
        info = bs1.find("div", {"class": "info open"}).text.strip("").replace("\n", ",").replace(" ", "")
        fans = bs1.find("div", {'class': 'up-info_right'}).findAll("span")[-1].text
    except Exception:
        # Best-effort scrape: leave unfetched fields as None.
        pass
    return [fans, dmnum, like, coin, collect, share, tags, info]



def main():
    """Entry point: run one scrape-and-export cycle."""
    upTable()

    # One-off debugging helpers (disabled):
    # print(getFansNum("https://www.bilibili.com/video/BV1GU4y1t7Bw"))
    # print([row[0] for row in getAllMysqlData()])  # list stored oids




if __name__ == '__main__':
    # Time the whole run and report the elapsed seconds.
    started = time.time()
    main()
    elapsed = time.time() - started
    print("总花费时间:{}s".format(elapsed))