#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import time
from download.downloader import Downloader
from analyze.analyzer import BasicAnalyzer
from store.store import BasicStore
from public.util import run_status

import logging
import traceback


class BasicHandler(object):
    """Base handler driving one spider's download -> parse -> store pipeline.

    Tasks are popped from a message queue (``mq``), optionally de-duplicated
    through redis (``rds``), downloaded, parsed, and persisted.  Subclasses
    customize behavior by overriding the factory hooks (``get_downloader``,
    ``get_analyzer``, ``get_store``), ``duplicated_key`` and ``save``.
    """

    def __init__(self, config):
        """Wire up the pipeline from a config dict.

        config must supply: 'name' (spider name), 'mq' (message queue client),
        'rds' (redis client); 'queue_name' defaults to the spider name.
        """
        self.name = config['name']
        self.mq = config['mq']
        self.rds = config['rds']
        self.queue_name = config.get('queue_name', self.name)
        self.config = config
        self.downloader = self.get_downloader()
        self.analyzer = self.get_analyzer()
        self.store = self.get_store()

    def get_downloader(self):
        """Factory hook for the downloader; subclasses may override."""
        return Downloader(self.config)

    def get_analyzer(self):
        """Factory hook for the analyzer; subclasses may override.

        Fixed: the previous guard ``if self.analyzer is None`` ran before
        ``self.analyzer`` was ever assigned (this method is called from
        ``__init__`` to *produce* that attribute), raising AttributeError —
        and would have returned None implicitly otherwise.
        """
        return BasicAnalyzer()

    def get_store(self):
        """Factory hook for the store; subclasses may override.

        Fixed the same pre-assignment ``self.store`` guard bug as
        ``get_analyzer``.
        """
        return BasicStore()

    # Subclasses that need de-duplication should override this hook.
    def duplicated_key(self, task):
        """Return the redis key used to de-duplicate ``task``, or None to skip."""
        return task.get('duplicated_key')

    # De-duplication is currently limited to redis.
    def is_duplicated(self, task):
        """Return True if the task's dedup key is already present in redis."""
        rds_key = self.duplicated_key(task)
        if rds_key is None:
            return False
        return self.rds.exists(rds_key) == 1

    # Default dedup window is 7 days.
    def save_duplicated(self, task):
        """Mark ``task`` as seen by writing its dedup key with an expiry.

        The expiry comes from task['expire_time'] (seconds) when present.
        """
        rds_key = self.duplicated_key(task)
        if rds_key is None:
            return
        ex_time = task.get('expire_time', 3600 * 24 * 7)
        self.rds.set(rds_key, 1, ex=ex_time)

    def run(self):
        """Main loop: pop tasks and run download -> parse -> store forever.

        When any stage reports run_status['pause'] (e.g. an expired cookie),
        the loop sleeps and logs until a human intervenes and restarts.
        Unexpected exceptions are logged with traceback and the loop continues.
        """
        status = run_status['success']
        while True:
            try:
                # Cookie expired: stop consuming tasks until manually fixed.
                if status == run_status['pause']:
                    logging.warning('spider[{}] is pause, please update cookie and restart it'.format(self.name))
                    time.sleep(60)
                    continue
                task = self.mq.pop(self.queue_name)
                if task is None:
                    time.sleep(1)
                    continue
                # Skip tasks processed within the dedup window.
                if self.is_duplicated(task):
                    logging.info('duplicate task {0}'.format(task))
                    continue
                # Download stage.
                status, response = self.downloader.download(task)
                if status != run_status['success']:
                    continue
                # Parse stage.
                status, result = self.analyzer.parse(response, task)
                if status != run_status['success']:
                    continue
                # Store stage.
                status = self.save(result, task)
                if status == run_status['except']:
                    # Recoverable failure: let the downloader clean up, keep running.
                    self.downloader.except_deal(task)
                    continue
                elif status != run_status['success']:
                    continue
                # Only mark as seen once the whole pipeline succeeded.
                self.save_duplicated(task)
            except Exception:
                # Keep the loop alive; logging.exception records the traceback.
                logging.exception('unexpected error in spider[%s] run loop', self.name)

    def save(self, result, task):
        """Persist a parsed result; subclasses implement. Should return a run_status value."""
        pass

    def gen_next(self, result, task):
        """Derive follow-up tasks from a result; subclasses may implement."""
        pass

    def close(self):
        """Release held resources; subclasses may implement."""
        pass
