# Define here the models for your scraped items
# bilibili 排行榜信息获取
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
import json
import os
import requests
import scrapy

from scrapy_bilibili_rank.email_int import SendMail
from scrapy_bilibili_rank.items import BankItem

class DemoFirst(scrapy.Spider):
    """Spider for the bilibili ranking ("排行榜") API.

    Flow:
      * ``start_requests`` — queries the "online" endpoint for the list of
        content-region ids, then schedules one ranking request per region.
      * ``parse`` — decodes each ranking response and yields one
        :class:`BankItem` per video entry.
      * ``close`` — e-mails the exported CSV, then removes the temp files.
    """

    custom_settings = {
        'ITEM_PIPELINES': {
            'scrapy_bilibili_rank.pipelines.JsonWriterCarPipeline': 300,
        }
    }
    name = "bilibili_bank"
    # allowed_domains must hold bare domain names, not URLs: Scrapy's
    # OffsiteMiddleware matches hostnames, and a full URL here both triggers
    # a warning and fails to match api.bilibili.com.
    allowed_domains = ["bilibili.com"]

    def start_requests(self):
        """Fetch the region ids and yield one ranking request per region."""
        resp = requests.get('https://api.bilibili.com/x/web-interface/online')
        # Renamed from `json` — the original shadowed the imported json module.
        payload = resp.json()
        if payload.get('code') != 0:
            # Log and yield nothing rather than calling exit(), which would
            # kill the whole process from inside a generator.
            self.logger.error("获取url失败")
            return
        # region_count maps region id (str) -> online count; only the ids
        # are needed to build the per-region ranking URLs.
        region_count = payload.get('data', {}).get('region_count', {})
        for rid in region_count:
            url = ("https://api.bilibili.com/x/web-interface/ranking/v2"
                   "?rid=" + rid + "&type=all")
            yield scrapy.Request(url=url, callback=self.parse)

    def parse(self, response):
        """Yield one BankItem per video entry in a ranking response.

        Responses with a non-zero ``code`` are silently skipped, matching
        the original behaviour (no else-branch existed).
        """
        rs = json.loads(response.text)
        if rs.get('code') != 0:
            return
        for entry in rs.get('data', {}).get('list', []):
            item = BankItem()
            item['分类ID'] = entry.get('tid')
            item['标题'] = entry.get('title')
            item['标签'] = entry.get('tname')
            item['描述'] = entry.get('desc')
            item['综合得分'] = entry.get('score')
            item['临时播放地址'] = 'https://www.bilibili.com/video/' + entry.get('bvid')
            item['创作人'] = entry.get('owner')['name']
            yield item

    def close(self, spider):
        """Mail the exported data file, then clean up the temp files."""
        # SECURITY: credentials are hard-coded in source — move them to
        # Scrapy settings or environment variables.
        mail = SendMail(
            username='413118324@qq.com',
            passwd='vbublpiumlknbgec',
            recv='zhe.wang@haochezhu.club',
            title='B站排行榜数据',
            content='B站排行榜数据',
            file='./data.csv',
        )
        mail.send_mail()
        try:
            os.remove(r"./bilibili_rank.json")
            os.remove(r"./data.csv")
        except OSError as e:
            # os.remove raises OSError (incl. FileNotFoundError); catching
            # only that instead of Exception avoids hiding unrelated bugs.
            print('移除失败。。', e)
        else:
            print('移除成功！')
        spider.logger.info('Spider closed: %s', spider.name)
