import scrapy
from scrapy.http import HtmlResponse
import time
from weibo_spyder.items import HotItem


class HotSpider(scrapy.Spider):
    """Spider for the Weibo (m.weibo.cn) realtime hot-search ranking.

    Fetches the mobile container API once and yields one ``HotItem`` per
    hot-search entry; items are indexed into Elasticsearch by
    ``HotToESPipeline`` (see ``custom_settings``).
    """

    name = 'm_hot'
    allowed_domains = ['m.weibo.cn']
    # Mobile-API container endpoint for the realtime hot-search list.
    start_urls = ['https://m.weibo.cn/api/container/getIndex?containerid=106003type%3D25%26t%3D3%26disable_hot%3D1%26filter_type%3Drealtimehot&title=%E5%BE%AE%E5%8D%9A%E7%83%AD%E6%90%9C&show_cache_when_error=1&extparam=seat%3D1%26lcate%3D1001%26filter_type%3Drealtimehot%26dgr%3D0%26c_type%3D30%26mi_cid%3D100103%26region_relas_conf%3D0%26cate%3D10103%26pos%3D0_0%26display_time%3D1675314208%26pre_seqid%3D1168968431&luicode=10000011&lfid=231583']
    custom_settings = {
        'ELASTICSEARCH_INDEX': 'hot',
        'ELASTICSEARCH_TYPE': 'HotItem',
        'DOWNLOAD_DELAY': 0.5,
        'RANDOMIZE_DOWNLOAD_DELAY': True,
        'ITEM_PIPELINES': {
            'weibo_spyder.pipelines.HotToESPipeline': 400
        },
    }

    def parse(self, response: HtmlResponse, **kwargs):
        """Parse the hot-search JSON response and yield one item per entry.

        :param response: API response whose JSON body contains
            ``data.cards[0].card_group``, a list of hot-search entries.
        :yields: ``HotItem`` with ``created_time`` (epoch ms), ``title``,
            and ``hot`` (heat score as a string; ``'0'`` when absent).
        """
        card_groups = response.json()['data']['cards'][0]['card_group']
        for card_group in card_groups:
            hot_item = HotItem()
            # Crawl timestamp in epoch milliseconds.
            # (Replaces the redundant int(float(round(...))) chain.)
            hot_item['created_time'] = int(time.time() * 1000)
            # Hot-search title.
            hot_item['title'] = card_group['desc']
            # Heat score: 'desc_extr' may be a bare number or a prefixed
            # string (e.g. "剧集 12345"); keep the last whitespace token.
            if "desc_extr" in card_group:
                hot_item['hot'] = str(card_group['desc_extr']).split(' ')[-1]
            else:
                # Use '0' (str, not int) so the field type is consistent
                # across documents and the Elasticsearch mapping cannot
                # conflict between string and integer values.
                hot_item['hot'] = '0'

            yield hot_item