#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import time
import requests
import uuid
import urllib3
import logging
import traceback
import random
import re
from requests import exceptions as request_ex

from pjdog_agent import get_agent
from pjdog_config import PJDogSession
from pjdog_public import check_path


def format_element(element):
    """Normalize a scraped text element for comparison/lookup.

    Strips all whitespace, removes quote characters and slashes, converts
    full-width parentheses to ASCII, and truncates company names at the
    first occurrence of '公司' (keeping that suffix).

    Non-string inputs are returned unchanged.
    """
    if not isinstance(element, str):
        return element
    # Raw strings fix the invalid escape sequences ('\s', '\/') that the
    # original non-raw patterns triggered warnings for.
    result = re.sub(r'\s', '', element)
    result = re.sub(r'[\'" /]', '', result)
    # Plain one-to-one character swaps: str.replace, no regex needed.
    result = result.replace('（', '(').replace('）', ')')
    # Keep everything up to and including the first '公司' marker.
    idx = result.find('公司')
    if idx >= 0:
        result = result.split('公司')[0].strip() + '公司'
    return result


def fake_url():
    """Build a random Baidu redirect-style URL, used as a fake Referer."""
    # uuid4().hex is the 32-char hex digest without dashes.
    hex_a = uuid.uuid4().hex
    hex_b = uuid.uuid4().hex
    # 18 hex chars, a '-q-' separator, then the remaining 14 + a full 32.
    token = hex_a[:18] + '-q-' + hex_a[18:] + hex_b
    return 'https://www.baidu.com/link?url=' + token


def get_session(url, data, proxy, headers=None):
    """Prime a requests.Session against *url* and wrap it in a PJDogSession.

    Randomized User-Agent and fake Referer headers are set on each attempt;
    up to 3 attempts are made, with a fresh Session and a 1-3s sleep after
    each failure.

    :param url: URL to hit once to establish cookies.
    :param data: unused; kept only for backward compatibility with callers.
    :param proxy: proxies mapping passed straight to requests.
    :param headers: optional base header dict; mutated in place and stored
        on the returned PJDogSession.
    :return: PJDogSession on success, None after 3 failed attempts.
    """
    session = requests.Session()
    if headers is None:
        headers = {}
    for _attempt in range(3):
        try:
            headers['User-Agent'] = get_agent()
            headers['Referer'] = fake_url()
            headers['Host'] = 'www.qichacha.com'
            headers['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'
            headers['Accept-Encoding'] = 'gzip, deflate'
            # NOTE(review): no timeout= is passed, so this can block
            # indefinitely on a stalled proxy — confirm before adding one.
            session.get(url, headers=headers, proxies=proxy)
            logging.info('get session ok')
            return PJDogSession(session, headers, proxy)
        except Exception as e:
            logging.error(e)
            logging.error(traceback.format_exc())
            # Start over with a clean session before retrying.
            session = requests.Session()
            time.sleep(random.randint(1, 3))
    logging.error('get session failed')
    return None


def download(pjdog_session, spider_url, out_file):
    """Fetch *spider_url* using the session's cookies/proxy and write the
    body to *out_file* as UTF-8.

    Return value contract (callers dispatch on these sentinels):
      HTTP status code  -- request completed (body only written when 200
                           and non-empty)
      -2  urllib3 ProxyError      -4  requests ProxyError
      -6  ChunkedEncodingError    -8  SSLError
      -10 ReadTimeout             -1  any other request or file-write error
    """
    try:
        # Mutates the shared session headers: fresh UA + fake Referer per call.
        headers = pjdog_session.headers
        headers['User-Agent'] = get_agent()
        headers['Referer'] = fake_url()
        # NOTE(review): no timeout= is set, so a stalled server can hang this
        # call indefinitely — confirm whether one should be added.
        response = requests.get(spider_url, headers=headers, proxies=pjdog_session.proxy, cookies = pjdog_session.session.cookies)
    except urllib3.exceptions.ProxyError:
        # Raw urllib3 proxy failure (checked before the requests wrapper).
        return -2
    except request_ex.ProxyError:
        return -4
    except request_ex.ChunkedEncodingError:
        return -6
    except request_ex.SSLError:
        return -8
    except request_ex.ReadTimeout:
        return -10
    except Exception as e:
        logging.error(e)
        logging.error(traceback.format_exc())
        return -1
    if response.status_code == 200:
        if len(response.text) > 0:
            try:
                # Ensure the output directory exists before writing.
                check_path(out_file)
                # Prefer the encoding declared in the page's <meta> tags;
                # fall back to requests' charset detection.
                encodes = requests.utils.get_encodings_from_content(response.text)
                if len(encodes) > 0:
                    cur_encode = encodes[0]
                else:
                    cur_encode = response.apparent_encoding
                # Re-decode raw bytes unless already utf-8 (case-sensitive
                # compare: 'UTF-8' takes the decode path too, harmlessly).
                if cur_encode != 'utf-8':
                    content = response.content.decode(cur_encode)
                else:
                    content = response.text
                # Output is always normalized to UTF-8 on disk.
                with open(out_file, 'w', encoding='utf-8') as fp:
                    fp.write(content)
            except Exception as e:
                logging.error(e)
                logging.error(traceback.format_exc())
                return -1
    return response.status_code
