# -*- coding: utf-8 -*-
"""
    :copyright: (c) 2019 by enjoyZHou on 2019-09-10
"""
import random
import scrapy
from scrapy.crawler import CrawlerProcess


# from scrapy.utils.project import get_project_settings


def from_object(obj):
    """Convert an object's public, non-callable attributes into a dict.

    Used to turn a plain settings class (e.g. ``MySetting``) into the
    dict form that ``CrawlerProcess`` expects.

    :param obj: any object or class whose attributes hold settings
    :return: dict mapping attribute name -> value, skipping dunder names
        and callables
    """
    return {
        name: getattr(obj, name)
        for name in dir(obj)
        if not name.startswith('__') and not callable(getattr(obj, name))
    }


# Downloader middleware that picks a random User-Agent for each request.
class RandomUserAgentMiddleware(object):
    """Assign a randomly chosen User-Agent header to every outgoing request."""

    def process_request(self, request, spider):
        """Choose a UA at random from the spider's USER_AGENT_LIST setting."""
        chosen = random.choice(spider.settings['USER_AGENT_LIST'])
        request.headers['User-Agent'] = chosen
        print('---------------User-Agent-------------------')
        return None

    def process_response(self, request, response, spider):
        """Pass the response through unchanged.

        Kept as a hook to verify that the User-Agent override took effect.
        """
        # logger.info("headers ::> User-Agent = " + str(request.headers['User-Agent'], encoding="utf8"))
        return response


class MySetting(object):
    """
    Scrapy crawler settings, converted to a dict via ``from_object`` before
    being passed to ``CrawlerProcess``.
    """
    ROBOTSTXT_OBEY = False          # ignore robots.txt
    COOKIES_ENABLED = False         # do not persist cookies between requests
    CONCURRENT_REQUESTS = 100       # high concurrency for bulk extraction
    DOWNLOAD_DELAY = 0.05           # small per-request delay (seconds)

    # Pool of browser User-Agent strings; RandomUserAgentMiddleware picks
    # one at random for each outgoing request.
    USER_AGENT_LIST = [
        'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:52.0) Gecko/20100101 Firefox/52.0',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/53.0.2785.116 Safari/537.36 QBCore/3.53.1159.400 QQBrowser/9.0.2524.400',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/39.0.2171.95 Safari/537.36 MicroMessenger/6.5.2.501 NetType/WIFI WindowsWechat',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/75.0.3770.142 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.116 Safari/537.36 '
        'QBCore/3.53.1159.400 QQBrowser/9.0.2524.400 Mozilla/5.0 (Windows NT 6.1; WOW64) '
        'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 '
        'MicroMessenger/6.5.2.501 NetType/WIFI WindowsWechat',
        'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/61.0.3163.79 Safari/537.36 Maxthon/5.2.7.5000 ',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/47.0.2526.106 BIDUBrowser/8.7 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:68.0) Gecko/20100101 Firefox/68.0',
        'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0',
        'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:68.0) Gecko/20100101 Firefox/68.0',
        'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3704.400 QQBrowser/10.4.3620.400',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.87 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36',
        'Mozilla/5.0 (Windows NT 5.1; rv:33.0) Gecko/20100101 Firefox/33.0',
    ]
    # Baseline headers applied to every request (User-Agent is injected
    # separately by the middleware above).
    DEFAULT_REQUEST_HEADERS = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,zh-TW;q=0.7',
        'Connection': 'keep-alive'
    }

    # NOTE(review): this dotted path assumes the module lives at
    # core/scrapy_extract.py — confirm it matches the actual module path.
    DOWNLOADER_MIDDLEWARES = {
        'core.scrapy_extract.RandomUserAgentMiddleware': 300,
    }


def get_types_func(key):
    """Return a conversion function for the given type key.

    :param key: type name such as ``'int'``; unknown or ``None`` keys fall
        back to ``str``
    :return: callable converting a raw scraped value into a string suitable
        for substitution into a URL template
    """
    def my_int(val):
        # Keep only the digit characters, returned as a string so it can be
        # substituted into url_tpl via str.format.
        # Bug fix: the original ``return filter(str.isdigit, val)`` returned
        # a lazy ``filter`` object on Python 3 (not a string), so the filter
        # object's repr would have leaked into the formatted URL.
        return ''.join(filter(str.isdigit, str(val)))

    types_func_dict = {
        'int': my_int,
    }
    return types_func_dict.get(key) or str


def start_crawl(conf, x_range=None):
    """
    Start a blocking crawl and collect URLs built from scraped values.

    :param conf: crawl configuration dict. Keys read here: ``'domains'``,
        ``'start_urls'``, ``'range'``, ``'url_tpl'`` and ``'xpath'``
        (a dict with ``'list'`` — row selector — and ``'items'`` — per-row
        field selectors, either a list/tuple for positional template args
        or a dict for named template args).
    :param x_range: optional ``(start, end)`` index pair overriding
        ``conf['range']`` to limit which rows are processed.
    :return: list of URLs rendered through the ``url_tpl`` template.
    """
    # Accumulated by MySpider.parse via closure; returned after the crawl ends.
    urls = []

    class MySpider(scrapy.Spider):
        # Your spider definition
        name = 'commmm'
        allowed_domains = conf.get('domains') or ()
        start_urls = conf.get('start_urls')
        c_range = x_range or conf.get('range')
        my_conf = conf

        def parse(self, response):
            res_data = {'url': ''}
            xpath = self.my_conf.get('xpath')
            list_xpath = xpath.get('list')
            x_items = xpath.get('items')  # may be a list/tuple or a dict
            url_tpl = self.my_conf.get('url_tpl') or ''
            c_range = self.c_range
            if list_xpath:
                p_ls = response.xpath(list_xpath)
                # print len(p_ls), '-----------=====', list_xpath
                for index, p in enumerate(p_ls):
                    # Skip rows outside the configured index range.
                    # NOTE(review): a bound of 0 is treated as "no bound"
                    # because of the truthiness checks — confirm intended.
                    if c_range:
                        if c_range[0] and index < c_range[0]:
                            continue
                        if c_range[1] and index > c_range[1]:
                            continue
                    if isinstance(x_items, (tuple, list)):
                        # Positional template parameters, in item order.
                        params = []
                        for x_i in x_items:
                            p_t = p.xpath(x_i).get()
                            params.append(p_t)
                        url = url_tpl.format(*params)
                        urls.append(url)
                    elif isinstance(x_items, dict):
                        # Named template parameters with optional type
                        # coercion via get_types_func.
                        params = {}
                        for x_k, x_v in x_items.items():
                            x_p = x_v.get('xpath')
                            types_func = get_types_func(x_v.get('type'))
                            p_t = p.xpath(x_p).get()
                            params[x_k] = types_func(p_t)
                        url = url_tpl.format(**params)
                        urls.append(url)

            else:
                pass
            # print url_tpl, urls
            return res_data

    process = CrawlerProcess(settings=from_object(MySetting))
    process.crawl(MySpider)
    process.start()  # the script will block here until the crawling is finished

    return urls
