#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
 function :bilibili 自动提醒更新内容
 author : leilei
'''

import os
import requests
from bs4 import BeautifulSoup
import random
import re
import urllib.parse
import json
import pymysql
import time



def get_upuser(kds):
    """Search bilibili for an uploader by keyword and return the first
    matching profile link as a BeautifulSoup <a> tag.

    Raises IndexError when the search page yields no uploader links.
    """
    # Encode the keyword into the search query string.
    qs = urllib.parse.urlencode({'keyword': kds})
    resp = requests.get('https://search.bilibili.com/upuser?' + qs)
    soup = BeautifulSoup(resp.text, 'html.parser')
    # Anchors tagged as uploader titles in the search results markup.
    matches = soup.find_all('a', attrs={
        'class': 'title',
        'se-linkid': "upuser_title_1"
    })
    # Same behaviour as before: take the first hit.
    return list(matches)[0]

def header():
    """Return a randomly chosen desktop-browser User-Agent string.

    Rotating the UA across requests makes the scraper's traffic look
    less uniform.
    """
    headers='''Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11
Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10
'''
    # BUG FIX: the literal ends with a newline, so split('\n') used to
    # produce a trailing empty string that randint could select, sending
    # requests with an empty User-Agent. Filter blank entries out.
    headerlist = [ua for ua in headers.split('\n') if ua.strip()]
    return random.choice(headerlist)

def get_pages(headers,uper_link):
    """Extract the uploader's numeric id (mid) from the profile link and
    query the submitted-videos API for the total page count.

    Parameters:
        headers: dict of HTTP headers (carries the rotated User-Agent).
        uper_link: BeautifulSoup <a> tag whose href embeds the mid.

    Returns:
        dict with 'pages' (number of result pages) and 'mid' (uploader id).
    """
    # href looks like '//space.bilibili.com/<mid>?...': drop the query
    # string, then strip every non-digit to isolate the mid.
    up_id_str = uper_link['href']
    base = up_id_str.split('?')[0]
    # FIX: raw string for the regex — "\D" in a plain literal is an
    # invalid escape sequence (DeprecationWarning, future SyntaxError).
    up_num = re.sub(r"\D", "", base)
    data = {
        'mid': up_num,
        'pagesize': '30',
        'tid': '0',
        'page': '1',
        'keyword': '',
        'order': 'pubdate',
    }
    space_link = 'https://space.bilibili.com/ajax/member/getSubmitVideos'
    resp = requests.post(space_link, data=data, headers=headers)
    dict_info = json.loads(resp.text)
    pages = dict_info['data']['pages']
    info = {
        'pages': pages,
        'mid': up_num
    }
    return info


#存储信息 放入列表里面
def get_list(info,headers):
    one_list = []
    for i in range(1,int(info['pages'])+1):
        data = {
            'mid':info['mid'],
            'pagesize':'30',
            'tid':'0',
            'page':i,
            'keyword':'',
            'order':'pubdate',
        }
        space_link = 'https://space.bilibili.com/ajax/member/getSubmitVideos';
        gj_link_list = requests.post(space_link,data=data,headers=headers)
        Json_info = gj_link_list.text;
        dict_info = json.loads(Json_info)
        data = dict_info['data']['vlist'];
        count = dict_info['data']['count'];
        one_list.append(data)
        one_list.append(count)
    return one_list


if __name__ == "__main__":
    print('notice:此程序针对up主存在我的稿件的情况')
    # One uploader keyword per line in upser.txt.
    with open('upser.txt', 'r+') as f:      # context manager closes the file reliably
        content = f.read()                  # 'str' previously shadowed the builtin
    kds_list = content.split('\n')
    for kds in kds_list:
        if not kds.strip():
            continue  # robustness: skip blank lines (e.g. trailing newline)
        uper_links = get_upuser(kds)
        headers = {
            'User-agent': header()
        }
        info = get_pages(headers, uper_links)
        list_data = get_list(info, headers)
        # Join each video record as a JSON fragment, comma separated.
        str_r = ",".join(json.dumps(i, ensure_ascii=False) for i in list_data[0])
        # NOTE(review): "/n" looks like a typo for "\n", but json.dumps
        # never emits raw newlines, so the call is a no-op either way;
        # kept byte-identical to preserve stored data format.
        str_json = str_r.replace("/n", '').replace('"', '')
        count1 = list_data[1]
        time1 = int(time.time())
        uper_name = kds

        # Database connection parameters.
        config = {
            'host': 'localhost',
            'user': 'root',
            'password': '1995',
            'db': 'upuser',
            'charset': 'utf8'
        }
        db = pymysql.connect(**config)
        cursor = db.cursor()
        # SECURITY FIX: parameterized queries throughout — the keyword and
        # the scraped video data are external input and were previously
        # interpolated straight into SQL strings.
        cursor.execute("SELECT * FROM upinfo WHERE uper_name = %s", (uper_name,))
        results = cursor.fetchall()
        if results:
            print("数据已经存在,更新中")
            # BUG FIX: the WHERE column was 'up_name', but the table uses
            # 'uper_name' (see the SELECT above and INSERT below), so the
            # UPDATE always failed and the error was silently swallowed.
            sql_up = ("UPDATE upinfo SET uper_list = %s, uper_total = %s, "
                      "updatetime = %s WHERE uper_name = %s")
            try:
                cursor.execute(sql_up, (str_json, count1, time1, uper_name))
                db.commit()
            except Exception:
                # Roll back on failure (no longer a bare except).
                db.rollback()
            print('数据更新完成')
        else:
            print("数据收集中")
            sql_in = ("INSERT INTO upinfo(uper_name,uper_list, uper_total, updatetime, addtime) "
                      "VALUES (%s, %s, %s, %s, %s)")
            try:
                cursor.execute(sql_in, (uper_name, str_json, count1, time1, time1))
                db.commit()
                print('数据收集完成')
            except Exception:
                print('插入失败,请检查数据格式')
        # Close the connection for this keyword.
        db.close()


