# -*- coding: utf-8 -*-
import logging
import time

import requests


class HttpClient(object):
    """Thin wrapper around a ``requests.Session`` that adds browser-like
    default headers, per-request defaults (timeout, verify), retry logic
    and a proxy fallback."""

    def __init__(self):
        # Browser-like defaults used whenever the caller supplies no headers.
        self.headers = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
                        'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6',
                        'Connection': 'keep-alive',
                        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.35'}
        self.logger = logging.getLogger('http_requests')
        # requests.Session() is the documented constructor; the lowercase
        # requests.session() is a deprecated alias with identical behavior.
        self.session = requests.Session()

    def get_response(self, method, url, encoding=None, sleep_time=0, depth=0, use_proxy=True, **kwargs):
        """
        Fetch ``url``, retrying on proxy errors, timeouts and truncated bodies.

        ``depth`` is the starting value of the attempt counter: while the
        counter is <= 3 requests go through whatever proxies the caller put in
        ``kwargs``; above 3 the proxy is dropped for one direct attempt; above
        4 the call gives up.  A negative ``depth`` therefore grants extra
        proxied attempts.  (Bug fix: the counter used to be unconditionally
        reset to 0 on entry, which made the parameter dead.)

        :param method: HTTP verb forwarded to ``Session.request`` ('GET', ...).
        :param url: absolute http(s) URL; ``None``/empty or a URL that does not
            start with http(s) yields ``(None, res)`` immediately.
        :param encoding: if given, forced onto the response before returning.
        :param sleep_time: seconds to pause before each attempt (0 = no pause).
        :param depth: initial attempt counter, see above.
        :param use_proxy: when False, force a direct connection from the start.
        :param kwargs: forwarded to ``Session.request``; ``headers``, ``verify``
            and ``timeout`` gain defaults when absent.
        :return: ``(response, res)`` on success or ``(None, res)`` on failure,
            where ``res`` records fetch_status_code / fetch_time /
            fetch_start_time and, for 200 responses, fetch_content_size /
            fetch_content_length.  A synthetic status code 599 marks proxy
            errors, timeouts, other request exceptions and truncated bodies.
        """
        res = {
            'fetch_status_code': None,
            'fetch_time': None,
            'fetch_start_time': None
        }
        if not use_proxy:
            kwargs['proxies'] = None

        while True:
            depth += 1
            if depth > 3:
                # Too many proxied failures: retry once without a proxy.
                kwargs['proxies'] = None
            if depth > 4:
                return None, res

            # Honor the caller's throttle (previously this parameter was dead:
            # the sleep call was commented out).  Default 0 means no pause.
            if sleep_time:
                time.sleep(sleep_time)

            if not url:
                self.logger.debug('return None;url is None;')
                return None, res

            # startswith() rather than substring search so an "http://"
            # embedded in a query string cannot satisfy the scheme check.
            if not url.startswith(('http://', 'https://')):
                self.logger.debug('return None;url is error;')
                return None, res

            # Fill in per-request defaults without overriding caller choices.
            kwargs.setdefault('headers', self.headers)   # browser-like headers
            kwargs.setdefault('verify', False)           # skip TLS cert checks by default
            kwargs.setdefault('timeout', (60, 120))      # (connect, read) seconds

            try:
                res['fetch_start_time'] = time.time()
                self.logger.info('request {} start # {}'.format(depth, url))
                response = self.session.request(method, url, **kwargs)
                self.logger.info('request {} end # {} # {}'.format(depth, response.status_code, url))
                res['fetch_time'] = time.time() - res['fetch_start_time']
                res['fetch_status_code'] = response.status_code
                if response.status_code == 200:
                    try:
                        content_length = int(response.headers.get('Content-Length', '0'))
                    except ValueError:
                        # Malformed Content-Length header: a bare int() here
                        # used to raise out of the except chain (ValueError is
                        # not a RequestException).  Skip the truncation check.
                        content_length = 0
                    res['fetch_content_size'] = len(response.content)
                    res['fetch_content_length'] = content_length
                    if response.headers.get('Content-Length'):
                        if content_length > len(response.content):
                            # Body shorter than advertised: treat as truncated
                            # and retry.
                            res['fetch_status_code'] = 599
                            self.logger.info('request {} # 文件不完整 # {}'.format(depth, url))
                            continue
                    if encoding:
                        response.encoding = encoding
                    return response, res
                else:
                    self.logger.error('request {} # status_code error # {}'.format(depth, response.status_code))
                    return None, res
            except requests.exceptions.ProxyError:
                res['fetch_status_code'] = 599
                self.logger.error('request {} # ProxyError # {}'.format(depth, url))
            except requests.exceptions.Timeout:
                res['fetch_status_code'] = 599
                self.logger.error('request {} # Timeout # {}'.format(depth, url))
            except requests.RequestException:
                res['fetch_status_code'] = 599
                self.logger.error('request {} # RequestException # {}'.format(depth, url))
