import sys
import os
import time
import threading
import logging

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import traceback
import queue
from queue import Queue
from report_system.spider.task import Task
from report_system.spider.request_ex import RequestEx
from report_system.spider import spider_config


class Spider(object):
    """Multi-threaded crawler driven by two queues.

    Tasks created with a ``url`` are fetched concurrently by ``spider_num``
    worker threads.  Tasks without a ``url`` (handler-only tasks) sit in a
    separate queue and are executed, one at a time, only after every pending
    URL task has finished.  A fatal error (or :meth:`stop`) clears the queues
    and pushes ``None`` sentinels so all workers exit.
    """

    def __init__(self,
                 spider_num=spider_config.spider_num_,
                 spider_interval_sec=spider_config.spider_interval_sec_,
                 spider_retry_interval_sec=spider_config.spider_retry_interval_sec_,
                 user_agent='random' if spider_config.random_user_agent_ else spider_config.user_agent_,
                 proxies=spider_config.proxies_,
                 request_ex_max_retry=spider_config.request_ex_max_retry_,
                 task_max_retry_num=spider_config.task_max_retry_num_,
                 ignore_exception=False,
                 exception_mail_to=None,
                 auto_stop=True
                 ):
        """
        :param spider_num: number of crawler worker threads (values <= 0 fall
            back to ``spider_config.spider_num_``)
        :param spider_interval_sec: pause between two tasks on one worker
        :param spider_retry_interval_sec: pause before an HTTP-level retry
            (forwarded to ``RequestEx.get``)
        :param user_agent: User-Agent header value; ``'random'`` picks a random
            one per request, ``None`` sends no default User-Agent
        :param proxies: default proxies for requests that do not supply their own
        :param request_ex_max_retry: HTTP-level retry count inside ``RequestEx.get``
        :param task_max_retry_num: task-level retry count around fetch + handler
        :param ignore_exception: if True, a permanently failed URL task is
            logged and skipped instead of stopping the whole spider
        :param exception_mail_to: address for failure-report mails
            (currently unused — see the TODO in :meth:`run`)
        :param auto_stop: stop automatically once both queues are drained
        """
        self.thread_num_ = spider_num if spider_num > 0 else spider_config.spider_num_
        self.spider_interval_sec_ = spider_interval_sec
        self.spider_retry_interval_sec_ = spider_retry_interval_sec
        self.user_agent_ = user_agent
        self.proxies_ = proxies
        self.request_ex_max_retry_ = request_ex_max_retry
        self.task_max_retry_num_ = task_max_retry_num
        # Last fatal error message; run() re-raises it after the threads join.
        self.exp_ = None
        self.ignore_exception_ = ignore_exception
        self.exception_mail_to_ = exception_mail_to
        self.auto_stop_ = auto_stop
        # One marker per in-flight URL task; emptiness means "all URL tasks done".
        self.requesting_queue_ = Queue()
        # URL tasks awaiting fetch (plus None sentinels on shutdown).
        self.queue_ = Queue()
        # NOTE(review): stop_queue_ is only ever cleared in run(); it looks unused.
        self.stop_queue_ = Queue()
        # Handler-only tasks, executed after all URL tasks complete.
        self.handler_queue_ = Queue()

    def queue_size(self):
        """Number of URL tasks still queued (approximate, per Queue.qsize)."""
        return self.queue_.qsize()

    def handler_queue_size(self):
        """Number of handler-only tasks still queued (approximate)."""
        return self.handler_queue_.qsize()

    def req(self, handler, **kwargs):
        """
        Enqueue one task.  A task without a ``url`` kwarg waits until every
        URL-type task has completed before it runs.

        :param handler: callable invoked with the response (or None for
            handler-only tasks) plus the extra kwargs captured by Task
        :param kwargs: forwarded to ``Task`` (url, request kwargs, ...)
        :return: None
        """
        task = Task(handler=handler, **kwargs)
        self.__add_task(task)

    def retry(self, task, sleep_sec=0):
        """
        Re-enqueue a task, bumping its retry counter.

        Note: the optional sleep happens on the *calling* thread before the
        task is re-queued.

        :param task: the Task to retry
        :param sleep_sec: seconds to sleep before re-queuing (0 = no sleep)
        """
        task.retry_num_ += 1
        if sleep_sec > 0:
            time.sleep(sleep_sec)
        self.__add_task(task)

    def __add_task(self, task):
        # URL tasks also push a marker onto requesting_queue_ so that
        # __url_task_running() can tell when all of them have drained.
        if task.url() is not None:
            self.requesting_queue_.put(0)
            self.queue_.put(task)
        else:
            self.handler_queue_.put(task)

    def __get_task(self):
        # Blocks until a task (or a None shutdown sentinel) is available.
        task = self.queue_.get(block=True)
        return task

    def __del_task(self):
        # Remove one in-flight marker; tolerate a race with __clear_task().
        try:
            self.requesting_queue_.get(block=False)
        except queue.Empty:
            pass

    def __clear_task(self):
        # Drops pending work directly via the underlying deques.
        # NOTE(review): this bypasses the Queue mutex/unfinished-task counters;
        # appears tolerated here because nothing calls Queue.join().
        self.handler_queue_.queue.clear()
        self.requesting_queue_.queue.clear()
        self.queue_.queue.clear()

    def __url_task_running(self):
        # True while any URL task is queued or being fetched.
        return not self.requesting_queue_.empty()

    def __process(self):
        """Worker loop: fetch URL tasks, invoking their handlers, with retries."""
        while True:
            task = self.__get_task()
            if task is None:
                # Shutdown sentinel from __stop().
                return
            task_retry = 0
            rsp = None
            while True:
                try:
                    kwargs = task.req_kwargs_
                    headers = kwargs['headers'] if 'headers' in kwargs else dict()
                    # Fill in the default proxy unless the task supplied one.
                    if 'proxies' not in kwargs:
                        kwargs['proxies'] = self.proxies_
                    # Fill in the default user-agent unless the task supplied one.
                    if headers is not None and 'user-agent' not in headers and 'User-Agent' not in headers:
                        if self.user_agent_ == 'random':
                            headers['user-agent'] = RequestEx.ua_.random
                        elif self.user_agent_ is not None:
                            headers['user-agent'] = self.user_agent_
                        # BUG FIX: original indexed headers['user-agent'] directly,
                        # raising KeyError when user_agent_ is None (neither branch
                        # above assigned the key); .get() returns None instead.
                        if headers.get('user-agent') is not None:
                            kwargs['headers'] = headers
                    req = RequestEx()
                    rsp = req.get(retry_interval_sec=self.spider_retry_interval_sec_,
                                  max_retry=self.request_ex_max_retry_, **kwargs)
                    task.handle_rsp(rsp)
                    break
                except Exception as exp:
                    exp_str = '爬虫异常, url=%s, e=%s, status_code=%d, text=%s, trace=%s' % \
                              (task.url()[0:100],
                               str(exp)[0:200],
                               rsp.status_code if hasattr(rsp, 'status_code') else 0,
                               rsp.text[0:200] if hasattr(rsp, 'text') else '',
                               traceback.format_exc()[0:500])
                    logging.warning(exp_str)
                    if task_retry >= self.task_max_retry_num_:
                        # Out of retries: either give up on this task only,
                        # or record the error and stop the whole spider.
                        if not self.ignore_exception_:
                            self.exp_ = exp_str
                            self.__stop()
                        break
                    time.sleep(1)
                    task_retry += 1
            self.__del_task()
            time.sleep(task.interval_sec_)

    def __handler_process(self):
        """Run handler-only tasks, one at a time, after URL tasks drain."""
        while True:
            # Wait until no URL task is queued or in flight.
            while self.__url_task_running():
                time.sleep(0.1)
            # Try to take one handler task without blocking.
            # BUG FIX: original passed timeout=1 together with block=False;
            # Queue.get ignores timeout when block is False, so the argument
            # was dead weight — removed.
            task = None
            try:
                task = self.handler_queue_.get(block=False)
            except queue.Empty:
                pass
            # Queue empty: stop (auto_stop) or keep polling.
            if task is None:
                if self.auto_stop_:
                    self.__stop()
                    return
                else:
                    continue
            # Execute the task; a handler failure is fatal for the spider.
            try:
                task.handle_rsp(None)
            except Exception as exp:
                self.exp_ = '爬虫异常, e=%s, traceback=%s' % (repr(exp)[0:200], traceback.format_exc()[0:500])
                logging.warning(self.exp_)
                self.__stop()

    def run(self):
        """
        Start the worker threads and block until they all finish.

        :raises Exception: re-raises the recorded error message if any task
            failed fatally while running.
        :return: None
        """
        self.exp_ = None
        self.stop_queue_.queue.clear()
        handler_thread = threading.Thread(target=self.__handler_process)
        handler_thread.start()
        threads = []
        for i in range(0, self.thread_num_):
            thread = threading.Thread(target=self.__process)
            threads.append(thread)
            thread.start()
        for thread in threads:
            thread.join()
        handler_thread.join()
        logging.info('任务完成')
        if self.exp_ is not None:
            # TODO: wire exception_mail_to_ up to a mail reporter, e.g.
            # util.report_exception(self.exp_[0:3000], self.exception_mail_to_)
            raise Exception(self.exp_)

    def __stop(self):
        # Drop all pending work, then wake every worker with a None sentinel.
        self.__clear_task()
        for i in range(0, self.thread_num_):
            self.queue_.put(None)

    def stop(self):
        """
        Stop the spider from outside (e.g. from within a task handler).

        :return: None
        """
        logging.info('停止爬虫 ...')
        self.__stop()


def handler(r, s):
    """Demo response handler: log the response, cache it in redis for 60s,
    echo it to stdout, then stop the spider."""
    body = r.text
    logging.info('%d, %s', r.status_code, body)
    from report_system.utils.redis_util import r_pool
    r_pool.set('response2', body, 60)
    print(body)
    s.stop()


def main():
    """Smoke test: fetch a single product-list URL and stop via the handler."""
    # util.init_logging()
    s = Spider(exception_mail_to='bianzhiwei@iyoujia.com')
    s.req(
        handler=handler,
        s=s,
        url='https://www.zhenguo.com/api/phx/cprod/products?dateBegin=20181128&dateEnd=20181128&cityPinyin=shanwei&locationCategory=a&locationId=21095')
    s.run()
    logging.info('完成')


if __name__ == '__main__':
    try:
        main()
    except Exception as e:
        # Same message as before, but let logging do the (lazy) formatting.
        logging.error('异常, %s, %s', repr(e), traceback.format_exc())