#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2017/11/13 10:48
# @Author  : Yunhao.Cao
# @File    : usc_crawler.py
from __future__ import unicode_literals
import datetime
from logger import Logger
from requests.exceptions import ProxyError
from downloader import Downloader
from proxy import ProxyManager
from queue import WebRequestQueue, RequestTask
from scheduler import Scheduler
from spider_custom import CustomSpider

__author__ = 'Yunhao.Cao'

__ALL__ = []


def _subprocess(process_id, task_queue, proxy_manager=None, log_config=None):
    """
    Worker loop run in each child process.

    Pull request tasks from the shared queue, validate their URLs against
    the spider's custom rules, download them (optionally through a proxy)
    and hand successful responses to ``CustomSpider.parse``.

    :param process_id: identifier of this worker, forwarded to the spider
    :param task_queue: shared queue of ``RequestTask`` objects
    :param proxy_manager: optional ``ProxyManager`` supplying proxy IPs
    :param log_config: keyword arguments for ``Logger.config`` (each child
        process must configure its own logger); may be ``None``
    :return: never returns normally (infinite loop)
    """
    # Re-configure logging inside the child process.
    # Guard against the ``None`` default — **None raises TypeError.
    Logger.config(**(log_config or {}))
    while True:
        task = task_queue.get()
        url = task.url
        # Skip URLs that do not match the spider's custom rules.
        if not CustomSpider.check_url(url):
            continue

        # Pick a proxy for this request, if a proxy manager is available.
        proxy_ip = proxy_manager.get() if proxy_manager else None

        response_text = None
        try:
            response_text = Downloader.get(url, proxy_ip=proxy_ip)
        except ProxyError:
            # The proxy itself failed: record the failure against the
            # proxy IP. Only possible when a proxy manager exists —
            # the original code crashed here when proxy_manager was None.
            if proxy_manager is not None:
                context = {
                    "url": url,
                    "time": datetime.datetime.now()
                }
                proxy_manager.add_fail_times(proxy_ip, context=context)
        except Exception as e:
            # Any other failure: requeue the task for up to 3 attempts,
            # then give up and log an error.
            Logger.warning("[_subprocess] Exception = {}".format(e))
            task.fail_time += 1
            if task.fail_time < 3:
                task_queue.put(task)
            else:
                Logger.error("Failure more than 3 times. ({})".format(url))
        else:
            # Success path: parse only non-empty responses.
            if response_text:
                CustomSpider.parse(url, response_text, current_task=task,
                                   task_queue=task_queue, process_id=process_id)


def _main():
    """
    Entry point: configure logging, seed the request queue with the first
    URL and start the scheduler, which runs ``_subprocess`` workers.

    :return: None
    """
    # Number of workers in the pool.
    scheduler_size = 3
    # Logging parameters, shared between this process and every worker.
    log_config = {
        "filename": "./out/log.log",
        "level": Logger.DEBUG,
        "format": "%(asctime)s [%(process)-4d] %(levelname)-8s : %(message)s",
        "datefmt": "[%Y-%m-%d %H:%M:%S]",
    }
    # Configure logging for the parent process.
    Logger.config(**log_config)
    # Shared queue of pending web requests.
    request_queue = WebRequestQueue()
    # Proxy manager backed by a local JSON file.
    proxy_manager = ProxyManager("./proxy.json")

    # Seed the crawl with its starting URL.
    request_queue.put(RequestTask("https://creditcard.bankcomm.com/content/pccc/discount/activitylist/more.html"))
    # request_queue.put("http://ip.chinaz.com/getip.aspx")

    # Keyword arguments forwarded to each worker process.
    sub_kwargs = {
        "task_queue": request_queue,      # shared task queue
        "proxy_manager": proxy_manager,   # proxy management
        "log_config": log_config,         # logging configuration
    }

    Logger.info("start")

    # Build the worker pool and block until it finishes.
    scheduler = Scheduler(scheduler_size, _subprocess, sub_kwargs)
    scheduler.join()


# Run the crawler only when executed as a script, not when imported.
if __name__ == '__main__':
    _main()
