# -*- coding: UTF-8 -*-
import scrapy
import messAround.util.help as util


# Bilibili - hot search keywords
# https://search.bilibili.com/
class BilibiliSearchSpider(scrapy.Spider):
    """Scrape Bilibili's hot-search keyword ranking.

    Requests the JSON hotword endpoint and yields one item per keyword,
    containing its 1-based rank, display name, and a search-results URL.
    Items are processed by ``BiliBiliSearchPipeline`` (see custom_settings).
    """

    name = 'bilibili_search'

    allowed_domains = ['search.bilibili.com']

    start_urls = ['http://search.bilibili.com/']

    custom_settings = {
        'ITEM_PIPELINES': {
            'messAround.pipeline.bilibili.BiliBiliSearchPipeline': 300
        }
    }

    def start_requests(self):
        """Fetch the hotword JSON API directly instead of the HTML start URL."""
        url = "https://s.search.bilibili.com/main/hotword"
        yield scrapy.Request(url=url, callback=self.parse, headers=util.default_headers)

    def parse(self, response):
        """Parse the hotword JSON payload and yield ranked keyword items.

        Each yielded dict has keys:
            'no'      -- 1-based rank in the hot-search list
            'keyword' -- display name of the entry
            'link'    -- Bilibili search URL for the underlying keyword
        """
        result = util.make_json(response.text)
        # Guard clause: yield nothing unless the API explicitly reports
        # success. .get() avoids a KeyError on malformed/error payloads.
        if result.get('message') != "success":
            return
        for rank, entry in enumerate(result['list'], start=1):
            yield {
                'no': rank,
                'keyword': entry['show_name'],
                'link': 'https://search.bilibili.com/all?keyword=' + entry['keyword'],
            }
