import json

import pymysql
import scrapy

from dataob.dataob.items import FollowerItem


class FollowerSpider(scrapy.Spider):
    """Spider that fetches follower statistics for Bilibili accounts.

    At construction time it reads every ``uid`` from the ``o_account``
    table in the local MySQL database and queues one request per uid
    against the Bilibili relation/stat API. Each JSON response is parsed
    into a :class:`FollowerItem`.
    """

    name = "follower"
    allowed_domains = ["api.bilibili.com"]

    def __init__(self, name=None, **kwargs):
        super().__init__(name, **kwargs)
        # Rebind as an instance attribute: the previous class-level
        # ``start_urls = []`` was shared by every instance, so creating
        # the spider more than once in a process duplicated URLs.
        self.start_urls = []

        # NOTE(review): credentials/host are hard-coded; presumably a
        # dev setup — consider moving to Scrapy settings.
        db = pymysql.connect(port=3306,
                             host='127.0.0.1',
                             user='root',
                             password='root',
                             database='data_observer',
                             cursorclass=pymysql.cursors.DictCursor)
        try:
            # Cursor as context manager so it is closed even on error.
            with db.cursor() as cursor:
                cursor.execute("SELECT uid FROM o_account")
                for row in cursor.fetchall():
                    uid = row['uid']
                    print("uid=%s" % uid)
                    url = "https://api.bilibili.com/x/relation/stat?vmid=" + str(uid)
                    self.start_urls.append(url)
        except Exception as e:
            # Best-effort: if the DB is unavailable the spider still
            # starts, just with no URLs to crawl.
            print("数据库错误=%s" % e)
        finally:
            # finally guarantees the connection is released on every
            # path (the original close could be skipped on some errors).
            db.close()

    def parse(self, response):
        """Parse one relation/stat JSON response into a FollowerItem.

        Yields a FollowerItem carrying the account uid (``data.mid``)
        and its follower count (``data.follower``); malformed responses
        are logged and skipped rather than aborting the crawl.
        """
        print("getFollower:")
        print(response.text)
        try:
            follower = json.loads(response.text)
            item = FollowerItem(uid=follower['data']['mid'],
                                follower=follower['data']['follower'])
            yield item
        except Exception as e:
            print("处理返回数据错误：%s" % e)



