# @Author: 唐奇才
# @Time: 2021/6/13 21:00
# @File: index.py
# @Software: PyCharm


# Annotating the return type this way makes the IDE offer completions -> requests.get
import pymysql
import requests
import lxml.etree as etree
import time

from fakeagent import get_fake_ua, get_fake_p

# Bilibili popular-ranking pages to crawl, one URL per category.
# The first five categories are currently disabled.
_RANK_BASE = 'https://www.bilibili.com/v/popular/rank/'

start_urls = [_RANK_BASE + category for category in (
    # 'all', 'bangumi', 'guochuang', 'douga', 'music',  # disabled
    'dance',
    'game',
    'knowledge',
    'tech',
    'car',
    'life',
    'food',
    'animal',
    'kichiku',
    'fashion',
    'ent',
    'cinephile',
    'origin',
    'rookie',
)]


def get_html(url, is_sleep=True, st=1, nc=False, retries=3) -> requests.Response:
    """GET *url* with a fake User-Agent and return the response on HTTP 200.

    Args:
        url: target URL.
        is_sleep: sleep *st* seconds before the request (crude rate limit).
        st: sleep duration in seconds.
        nc: when True, also send the logged-in cookies and route through
            a proxy from ``get_fake_p()``.
        retries: remaining retries on a non-200 status (new, defaulted, so
            existing callers are unaffected).

    Returns:
        The ``requests.Response`` on success, or ``None`` once *retries*
        is exhausted.
    """
    # Session cookies of a logged-in account; some API endpoints need them.
    cookies = dict(
        buvid3='2E06E51F-F7D4-49E6-A6C8-13D05853CBA1155829infoc',
        LIVE_BUVID='AUTO2515767581818698',
        rpdid="|(J|J~uuYu0J'ul~Y~l)YJR",
        PVID='9',
        blackside_state='1',
        CURRENT_FNVAL='80',
        _uuid='1F00DBBA-BB89-C52A-DBAC-B875A3C50DCF01617infoc',
        CURRENT_QUALITY='80',
        balh_server_inner='__custom__',
        buvid_fp='2E06E51F-F7D4-49E6-A6C8-13D05853CBA1155829infoc',
        buvid_fp_plain='2E06E51F-F7D4-49E6-A6C8-13D05853CBA1155829infoc',
        bp_video_offset_149746646='534925759976319232',
        bp_t_offset_149746646='534926245306578768',
        fingerprint='8d0a062e700bd7f39c3023e1c9019a05',
        SESSDATA='91abf274%2C1639142986%2C27f0f%2A61',
        bili_jct='5c61d76f60cd8a6fb68ead0e439e9a1c',
        DedeUserID='149746646',
        DedeUserID__ckMd5='94ffcc3be97f858f',
        sid='85u1v2mo',
        bfe_id='6f285c892d9d3c1f8f020adad8bed553'
    )
    if is_sleep:
        time.sleep(st)
    if nc:
        res = requests.get(
            url=url, headers=get_fake_ua(),
            proxies=get_fake_p(),
            cookies=cookies
        )
    else:
        res = requests.get(
            url=url, headers=get_fake_ua(),
            # proxies=get_fake_p(),
        )
    print(res.status_code)
    if res.status_code == 200:
        return res
    if retries > 0:
        # BUG FIX: the original recursed without `return` (so every failed
        # request yielded None even when a retry succeeded), dropped the
        # caller's options on retry, and could recurse forever.
        return get_html(url, is_sleep, st, nc, retries - 1)
    return None


def get_hot_rank(url):
    """Scrape one ranking page and hand every listed video to ``parse``.

    Args:
        url: a Bilibili popular-rank page, e.g. .../rank/dance; the last
            path segment is used as the ranking-type tag stored per row.
    """
    rank_type = url.split("/")[-1]
    print("spider started", url)  # fixed typo: "stared" -> "started"
    res = get_html(url, is_sleep=False)
    xpd = etree.HTML(res.text)
    scores = xpd.xpath('//ul[@class="rank-list"]/li//div[@class="pts"]/div/text()')
    urls = xpd.xpath('//ul[@class="rank-list"]/li//div[@class="info"]/a/@href')

    # Pair scores with hrefs via zip instead of indexing range(len(urls));
    # this also avoids an IndexError when the two lists differ in length.
    for href, score in zip(urls, scores):
        # hrefs are protocol-relative; the BV id is the last path segment.
        bvid = ('https:' + href).split("/")[-1]
        next_url = "https://api.bilibili.com/x/web-interface/view?bvid={}".format(bvid)
        parse(next_url, score, rank_type)


def get_up_info(mid) -> list:
    """Fetch an uploader's profile card from the Bilibili card API.

    Args:
        mid: the uploader's member id.

    Returns:
        A list of four strings: [fans, friend, attention, sex].
    """
    api = "https://api.bilibili.com/x/web-interface/card?jsonp=jsonp&mid={}".format(mid)
    card = get_html(api).json()['data']['card']
    return [str(card[field]) for field in ('fans', 'friend', 'attention', 'sex')]


def get_tags(url, remax=5):
    """Scrape the tag list from a video page, retrying on empty results.

    Args:
        url: the video page URL.
        remax: remaining retries; the page sometimes renders without tags.

    Returns:
        A '|'-joined tag string, or False once all retries are exhausted.
    """
    # Check the retry budget BEFORE issuing a request; the original fetched
    # the page first and only then returned False, wasting one request.
    if remax <= 0:
        return False
    res = get_html(url, is_sleep=False)
    xpd = etree.HTML(res.text)
    tags = xpd.xpath('//div[@id="v_tag"]/ul/li//div//text()')
    if not tags:
        print('retry:', remax, url)
        # BUG FIX: the original recursed without `return`, so every retry
        # path yielded None even when a later attempt found tags.
        return get_tags(url, remax - 1)
    tags = [tag.replace("\n", "").replace(" ", "") for tag in tags]
    # Dead code removed: str.join never returns None, so the original
    # `if tags is None: get_tags(url)` branch could never execute.
    return "|".join(tags)


def parse(url, score, rt):
    """Fetch one video's detail JSON, flatten it into a row and persist it.

    Args:
        url: the web-interface/view API URL for the video.
        score: the ranking score scraped from the rank page.
        rt: ranking type (last path segment of the rank page URL).
    """
    res = get_html(url).json()
    data = res['data']
    bvid = "https://www.bilibili.com/video/" + data['bvid']
    info = [bvid, score, rt,
            data['aid'], data['pic'], data['title'], data['pubdate'], data['ctime'], data['desc'],
            data['owner']['mid'], data['owner']['name'], data['owner']['face'], data['stat']['view'],
            data['stat']['danmaku'], data['stat']['reply'], data['stat']['favorite'], data['stat']['coin'],
            data['stat']['share'], data['stat']['his_rank'], data['stat']['like'], data['cid']] + \
           get_up_info(data['owner']['mid'])
    tags = get_tags(bvid)
    if not tags:
        tags = "no tags"
    # Dead loop removed: after the fallback above `tags` can never be None,
    # so the original `while tags is None:` never ran.
    info.append(tags)
    # SECURITY/BUG FIX: escape each value so quotes or backslashes in
    # titles, descriptions or tags cannot break (or inject into) the INSERT
    # statement that save_to_mysql builds by string formatting.
    # NOTE(review): parameterized queries would be safer still, but would
    # require changing save_to_mysql's interface.
    info = "'" + "','".join(pymysql.converters.escape_string(str(i)) for i in info) + "'"
    save_to_mysql(info)


def save_to_mysql(data):
    """Insert one pre-formatted row into the `ranks` table.

    Args:
        data: a comma-separated, single-quoted value list matching the
            column order below (built and escaped by ``parse``).

    Errors are logged and swallowed on purpose so that one bad row does
    not abort the whole crawl.
    """
    conn = None
    try:
        conn = pymysql.connect(host="localhost", user="root",
                               password="1234", db="b-video-stat", port=3306)
        # NOTE(review): the SQL is built by string formatting; `data` must
        # already be escaped by the caller (see parse).
        sql = """
            INSERT INTO `b-video-stat`.`ranks`
            (`url`, `score`,`type`, `aid`, `pic`, `title`, `pubdate`, `ctime`, `desc`, `mid`, `name`, `face`, `view`, `danmaku`, `reply`, `favorite`, `coin`, `share`, `his_rank`, `like`, `cid`, `follower`, `friend`, `attention`, `sex`, `tags`) 
            VALUES 
            ({})
        """.format(data)
        # `with` closes the cursor even when execute() raises; the original
        # leaked both the cursor and the connection on any error.
        with conn.cursor() as cur:
            row = cur.execute(sql)
            if row == 1:
                print("插入成功！")
        conn.commit()
    except Exception as e:
        # Best-effort per-row insert: log and continue with the next row.
        print('出错了：', e)
    finally:
        if conn is not None:
            conn.close()


def main():
    """Crawl every configured popular-ranking page in order."""
    # One-off debugging calls, kept for reference:
    # parse('https://api.bilibili.com/x/web-interface/view?bvid=BV1mQ4y197qQ', 1000)
    # get_tags('https://www.bilibili.com/video/BV1xV41147Gw')
    for rank_page in start_urls:
        get_hot_rank(rank_page)


# Script entry point: crawl all configured ranking pages.
if __name__ == '__main__':
    main()
