#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import requests
import json
import urllib3
from requests import exceptions
from public.util import run_status, random_ua
import logging
import traceback

'''
task = {
    'type': 'download_xx',               # 必填，任务类型
    'url': 'http://www.xx.com/id.html'   # 必填，url地址， get请将参数放到url中
    'file': './data/download_xx/id.html' # 选填, 是否要存成文件
    'duplicated': False,                 # 选填, False为不去重
    'duplicated_key': 'xx',              # 选填, 去重的key，一般不使用
    'method': 'post',                    # 选填, post接口用
    'params': {'a':'b', 'c':'d'}         # 选填, 传参数
    ...                                  # 其它自定义参数
    'data'                               # data 已经预留，不允许使用
    'expire_time'                        # 选填, 传去重过期时间
}

config = {
    'name'
    'mq'
    'stat'
    'proxy'
    'mdb'
    'analyze_type'
}
'''


class Downloader(object):
    """Generic HTTP download worker.

    Pulls task dicts (see module docstring) off a redis-backed queue,
    fetches each task's URL via GET or POST, and maintains per-task
    statistics. Subclasses may override :meth:`request` to customise the
    actual HTTP call.
    """

    def __init__(self, config):
        self.config = config
        self.name = config['name']  # download task name
        self.mq = config['mq']      # task-queue instance
        self.rds = config['rds']    # redis instance used for de-duplication
        self.queue_name = config.get('queue_name', self.name)  # task queue name
        # tasks that raised during download are pushed here for re-processing
        self.except_queue = config.get('except_queue', 'except_queue')

        # task-processing statistics backend; optional — may be None
        self.stat = config.get('stat', None)
        self.stat_hash = config.get('stat_hash', '{0}_stat_hash'.format(self.name))
        self.proxy = config.get('proxy', None)  # proxy provider; optional
        self.time_out = config.get('time_out', 5)  # seconds, passed to requests

    def _incr(self, field):
        """Increment a stat counter, tolerating a missing stat backend.

        The previous code called ``self.stat.incr`` unconditionally and
        crashed with AttributeError whenever config had no 'stat' entry,
        even though __init__ treats 'stat' as optional.
        """
        if self.stat is not None:
            self.stat.incr(self.stat_hash, field)

    def get_headers(self, task):
        """Resolve request headers: config override > task headers > random UA."""
        if 'headers' in self.config:
            # headers should not live in config; kept here to satisfy
            # pycharm's syntax inspection (per original author's note)
            return self.config['headers']
        return task['headers'] if 'headers' in task else {'user-agent': random_ua()}

    def get_proxy(self):
        """Return a proxies mapping from the proxy provider, or None if unset."""
        return self.proxy.get_proxy(self.config) if self.proxy is not None else None

    def download(self, task):
        """Fetch one *task* (a dict taken from the redis queue).

        Returns a ``(run_status, response)`` pair. On any request exception
        the task is serialised back onto the exception queue and
        ``(run_status['except'], None)`` is returned. If :meth:`get_headers`
        yields None the worker pauses without downloading.
        """
        headers = self.get_headers(task)
        if headers is None:
            # convention: a None headers value means "pause processing"
            logging.warning('headers is None, do nothing')
            return run_status['pause'], None

        proxies = self.get_proxy()
        response = requests.Response()
        except_flag = False
        try:
            response = self.request(headers, proxies, task)
        except urllib3.exceptions.ProxyError:
            # urllib3-level proxy failure can leak through requests' wrapping
            except_flag = True
            self._incr('proxy_timeout')
            logging.warning('deal {0} proxy {1} urllib3.exceptions.ProxyError'.format(task, proxies))
        except exceptions.ProxyError:
            except_flag = True
            self._incr('proxy_timeout')
            logging.warning('deal {0} proxy {1} exceptions.ProxyError'.format(task, proxies))
        except exceptions.ReadTimeout:
            except_flag = True
            self._incr('read_timeout')
            logging.warning('deal {0} proxy {1} exceptions.ReadTimeout'.format(task, proxies))
        except exceptions.ConnectTimeout:
            except_flag = True
            # NOTE(review): counter key 'proxy_time_out' differs from the
            # 'proxy_timeout' used above; kept byte-identical so existing
            # stats consumers keep working — confirm whether it was intended.
            self._incr('proxy_time_out')
            logging.warning('deal {0} proxy {1} exceptions.ConnectTimeout'.format(task, proxies))
        except Exception as e:
            # boundary catch-all: log full traceback, then requeue the task
            except_flag = True
            self._incr('download_except')
            logging.error(e)
            logging.error(traceback.format_exc())
            logging.error('unexpect exception {0}'.format(task))

        # proxy or program error: push the task back for later re-processing
        if except_flag:
            self._incr('except')
            self.mq.push(self.except_queue, json.dumps(task))
            return run_status['except'], None

        return run_status['success'], response

    # meant to be overridden by subclasses
    def request(self, headers, proxies, task):
        """Perform the HTTP request described by *task* and return the response.

        POST when ``task['method'] == 'post'`` (body from ``task['params']``,
        JSON-encoded when the 'dumps' key is present), otherwise GET (with
        ``task['params']`` as the query string when present).
        """
        kwargs = {'headers': headers, 'timeout': self.time_out}
        # requests treats proxies=None the same as omitting the argument,
        # so one kwargs dict replaces the old 2x2 branch duplication
        if proxies is not None:
            kwargs['proxies'] = proxies

        # close the Session deterministically (it previously leaked)
        with requests.Session() as session:
            if task.get('method') == 'post':
                params = task.get('params', {})
                data = json.dumps(params) if 'dumps' in task else params
                return session.post(task['url'], data=data, **kwargs)
            if 'params' in task:
                kwargs['params'] = task['params']
            return session.get(task['url'], **kwargs)
