
import pymysql
from lxml import etree
from index import get_html


def parse1(uid):
    """Fetch an UP's aggregate stats (total video views and total likes).

    uid: Bilibili user id (numeric string or int).
    Returns [view, likes].
    """
    url = "https://api.bilibili.com/x/space/upstat?mid={}&jsonp=jsonp".format(uid)
    payload = get_html(url, nc=True).json()
    stats = payload['data']
    return [stats['archive']['view'], stats['likes']]


def parse2(uid):
    """Fetch an UP's profile fields.

    uid: Bilibili user id.
    Returns [mid, name, sex, face, sign, level, coins, birthday].
    """
    url = "https://api.bilibili.com/x/space/acc/info?mid={}&jsonp=jsonp".format(uid)
    profile = get_html(url).json()['data']
    fields = ('mid', 'name', 'sex', 'face', 'sign', 'level', 'coins', 'birthday')
    return [profile[f] for f in fields]


# All uploaded videos (first page only)
def parse3(uid):
    """Fetch the first page (up to 25 items) of an UP's uploads.

    uid: Bilibili user id.
    Returns (count, vinfo) where count is the total number of uploads
    and vinfo is a list of per-video field lists:
    [comment, typeid, play, pic, description, title, author, mid,
     created, length, aid].
    """
    url = ("https://api.bilibili.com/x/space/arc/search"
           "?mid={}&pn=1&ps=25&index=1&jsonp=jsonp").format(uid)
    payload = get_html(url).json()['data']
    videos = [
        [
            v['comment'],
            v['typeid'],
            v['play'],
            v['pic'],
            # Strip newlines so the description stays a single line.
            str(v['description']).replace('\n', ""),
            v['title'],
            v['author'],
            v['mid'],
            v['created'],
            v['length'],
            v['aid'],
        ]
        for v in payload['list']['vlist']
    ]
    return payload['page']['count'], videos


def parse4(uid):
    """Fetch an UP's relation stats.

    uid: Bilibili user id.
    Returns [following, follower].
    """
    url = "https://api.bilibili.com/x/relation/stat?vmid={}&jsonp=jsonp".format(uid)
    relation = get_html(url).json()['data']
    return [relation['following'], relation['follower']]


def parse5(uid):
    """Fetch an UP's "masterpiece" (featured) videos.

    uid: Bilibili user id.
    Returns a list of per-video field lists; returns an empty list when
    the user has no featured videos.

    BUG FIX: previously returned False for the empty case, which made the
    caller's `up_best != []` comparison True and led to iterating over a
    bool downstream. An empty list is still falsy, so existing truthiness
    checks keep working.
    """
    url = "https://api.bilibili.com/x/space/masterpiece?vmid={}&jsonp=jsonp".format(uid)
    res = get_html(url).json()
    data = res['data']
    if not data:
        print("不存在代表作！")
        return []
    info = []
    for d in data:
        info.append(
            [
                d['aid'],
                d['tname'],
                d['pic'],
                d['title'],
                # Flatten the description to one line (drop CR and LF).
                str(d['desc']).replace('\n', "").replace("\r", ""),
                d['duration'],
                d['owner']['mid'],
                d['owner']['name'],
                d['owner']['face'],
                d['stat']['view'],
                d['stat']['danmaku'],
                d['stat']['reply'],
                d['stat']['favorite'],
                d['stat']['coin'],
                d['stat']['share'],
                d['stat']['like'],
                d['short_link'],
                d["bvid"],
            ]
        )
    return info


def get_up_info(d):
    """Collect all data for one UP (uid d) and persist it to MySQL.

    Aggregates profile/stat rows into up_info, the first page of uploads
    into up_word, and featured videos into up_best.
    """
    count, up_word = parse3(d)
    up_info = parse1(d) + parse2(d) + parse4(d)
    up_info.append(count)
    up_best = parse5(d)
    save_up_info(up_info)
    save_up_word(up_word)
    # BUG FIX: the old check was `up_best != []`, which is True when
    # parse5 returns False (no featured videos), so save_up_best(False)
    # was called and blew up iterating a bool. A plain truthiness test
    # handles both False and [] correctly.
    if up_best:
        save_up_best(up_best)


def get_ups():
    """Scrape the all-category popularity ranking page for UP uids.

    Returns a list of uid strings taken from the last path segment of
    each ranked entry's profile link.
    """
    page = get_html("https://www.bilibili.com/v/popular/rank/all")
    doc = etree.HTML(page.text)
    hrefs = doc.xpath('//div[@class="detail"]/a/@href')
    return [str(href).rsplit("/", 1)[-1] for href in hrefs]


def save_up_info(data):
    """Insert one UP profile row into `b-video-stat`.`up_info`.

    data: 13-element sequence in column order
    (view, likes, mid, name, sex, face, sign, level, coins, birthday,
     following, follower, count).

    SECURITY FIX: values used to be spliced into the SQL text with
    string concatenation, which breaks on quotes in names/signatures and
    is an SQL-injection vector. Parameterized placeholders let pymysql
    escape every value.
    """
    sql = (
        "INSERT INTO `b-video-stat`.`up_info` "
        "(`view`, `likes`, `mid`, `name`, `sex`, `face`, `sign`, `level`, "
        "`coins`, `birthday`, `following`, `follower`, `count`) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
    )
    try:
        conn = pymysql.connect(host="localhost", user="root",
                               password="1234", db="b-video-stat", port=3306)
        try:
            # `with` closes the cursor even if execute raises.
            with conn.cursor() as cur:
                cur.execute(sql, list(data))
            conn.commit()
        finally:
            conn.close()
    except Exception as e:
        print('出错了：', e)


def save_up_word(data):
    """Insert one row per video into `b-video-stat`.`up_word`.

    data: iterable of 11-element sequences in column order
    (comment, typeid, play, pic, description, title, author, mid,
     created, length, aid).

    SECURITY FIX: replaced string-concatenated SQL (injection-prone and
    broken by quotes in titles/descriptions) with parameterized
    placeholders. Each row is still committed individually so earlier
    rows survive a mid-loop failure, matching the original behavior.
    """
    sql = (
        "INSERT INTO `b-video-stat`.`up_word` "
        "(`comment`, `typeid`, `play`, `pic`, `description`, `title`, "
        "`author`, `mid`, `created`, `length`, `aid`) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
    )
    try:
        conn = pymysql.connect(host="localhost", user="root",
                               password="1234", db="b-video-stat", port=3306)
        try:
            with conn.cursor() as cur:
                for row in data:
                    cur.execute(sql, list(row))
                    conn.commit()
        finally:
            conn.close()
    except Exception as e:
        print('出错了：', e)


def save_up_best(data):
    """Insert one row per featured video into `b-video-stat`.`up_best`.

    data: iterable of 18-element sequences in column order
    (aid, tname, pic, title, desc, duration, mid, name, face, view,
     danmaku, reply, favorite, coin, share, like, short_link, bvid).

    SECURITY FIX: replaced string-concatenated SQL (injection-prone and
    broken by quotes in titles/descriptions) with parameterized
    placeholders. Per-row commit is kept to match the original
    partial-save behavior on mid-loop failure.
    """
    sql = (
        "INSERT INTO `b-video-stat`.`up_best` "
        "(`aid`, `tname`, `pic`, `title`, `desc`, `duration`, `mid`, "
        "`name`, `face`, `view`, `danmaku`, `reply`, `favorite`, `coin`, "
        "`share`, `like`, `short_link`, `bvid`) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, "
        "%s, %s, %s, %s, %s)"
    )
    try:
        conn = pymysql.connect(host="localhost", user="root",
                               password="1234", db="b-video-stat", port=3306)
        try:
            with conn.cursor() as cur:
                for row in data:
                    cur.execute(sql, list(row))
                    conn.commit()
        finally:
            conn.close()
    except Exception as e:
        print('出错了：', e)


def main():
    """Crawl the popularity ranking for UP uids and persist each one."""
    uids = get_ups()
    print(uids)
    # enumerate replaces the hand-rolled counter; progress output is
    # identical to the original (1-based index, then the uid).
    for position, uid in enumerate(uids, start=1):
        print(position, uid)
        get_up_info(uid)


# Script entry point: only crawl when run directly, not when imported.
if __name__ == '__main__':
    main()

