#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
@File    :   glnav.py    
@Contact :   291622538@qq.com

@Modify Time      @Author    @Version    @Description
------------      -------    --------    -----------
2021/3/20 13:54   fan        1.0         None
"""
from abc import ABC

from scrapy import Spider, Request
from tldextract import tldextract

from website.items import DomainInfo


class ShiJie(Spider, ABC):
    """Spider for shijie.haohaoxue.com.

    Crawls every topic detail page (ids 1..15932) and yields one
    ``DomainInfo`` item per page describing the listed website
    (display name, outbound URL, registered domain, category,
    country and description).
    """

    # Spider name used by `scrapy crawl`.
    name = "shijie_com"
    # Dedicated pipeline route for this spider only.
    custom_settings = {
        'ITEM_PIPELINES': {'website.pipelines.SaveShiJiePipeline': 600},
    }

    # Topic detail pages observed on the site span ids 1..15932.
    URL_TEMPLATE = "http://shijie.haohaoxue.com/topics/%s.html"
    LAST_TOPIC_ID = 15932

    def start_requests(self):
        """Spider entry point: request every topic page.

        ``dont_filter=True`` disables Scrapy's duplicate-request filter so
        that :meth:`parse` can re-issue the same URL when extraction fails.
        """
        for topic_id in range(1, self.LAST_TOPIC_ID + 1):
            yield Request(self.URL_TEMPLATE % topic_id, callback=self.parse,
                          dont_filter=True)

    def parse(self, response, **kwargs):
        """Extract a ``DomainInfo`` item from one topic page.

        If the outbound URL cannot be extracted (page not fully rendered),
        the same page is requested again instead of yielding an item.
        """
        try:
            # Site display name from the page heading.
            domain_name = "".join(response.xpath('//h2//text()').extract())

            # The last "goto" button carries the outbound link. The xpath
            # may match nothing on a partially rendered page; guard against
            # that instead of indexing [-1] into an empty list (which would
            # raise IndexError and bypass the retry branch below).
            hrefs = response.xpath('//a[@class="btn-goto pull-left"]/@href').extract()
            url = hrefs[-1].replace('"', "") if hrefs else ""

            if url == "":
                # Extraction failed — retry the page once more
                # (possible because requests are created with dont_filter=True).
                self.logger.info("没有获取到，重新获取")
                yield Request(response.url, callback=self.parse, dont_filter=True)
                return

            # Registered domain, e.g. "example.com" from "http://www.example.com/x".
            parts = tldextract.extract(url)
            domain = parts.domain + "." + parts.suffix

            # Category and country come from adjacent spans in the same <li>;
            # NOTE(review): xpath positions assume the site's current layout.
            site_type = "".join(response.xpath('//ul/li[2]/span[2]//a/text()').extract())
            country = "".join(response.xpath('//ul/li[2]/span[1]//a/text()').extract())
            desc = "".join(response.xpath('//div[@class="box-module introduce-module"]//p//text()').extract())

            yield DomainInfo(domainName=domain_name, url=url, domain=domain,
                             type=site_type, country=country, desc=desc)
        except Exception:
            # Best-effort crawl: log the failing page with traceback and move on.
            self.logger.exception("parse failed for %s", response.url)
