#!/usr/bin/env python
# -*- coding:utf-8 -*-
import abc
import os
import re
import time

import requests
import six
from lxml import etree

from const import PATH, START_ARGS
from util import init_logger
from util.user_agents import get_user_agent

# Public API of this module.
__all__ = ['BaseParser']

# Extracts the charset declared in an HTML <meta> tag, e.g.
# <meta charset="utf-8"> or <meta ... content="text/html; charset=gbk;">.
# The value lands in the named group 'encoding'; the (?:;|") alternation
# stops the lazy capture at the end of the attribute value.
re_encoding = re.compile(r'<meta.*?charset(?:="|=)(?P<encoding>.*?)(?:;|").*?>')
# Pulls a requests exception class name out of a repr-style string such as
# "requests.exceptions.ConnectionError'": the lookbehind anchors on the
# module prefix and the lookahead on the trailing single quote.
# NOTE(review): not referenced in this chunk — presumably used by callers
# elsewhere in the package; confirm before removing.
re_request_exception = re.compile(r'(?<=requests.exceptions.).*(?=\')')


@six.add_metaclass(abc.ABCMeta)
class BaseParser:
    """Abstract base class for site parsers.

    Wires up per-parser logging, optional database connections, and an
    HTTP helper that fetches a page and parses it into an lxml tree.
    Subclasses implement :meth:`run`.
    """

    # Default positional arguments substituted when start() receives the
    # START_ARGS sentinel.
    START_ARGS = ()
    # Names of the databases this parser needs; looked up in the ``dbs``
    # mapping given to __init__.
    use_db = []

    def __init__(self, name, workspace=os.path.join(PATH, 'logs'), options=None, msg_que=None, stream=False,
                 debug=False, timeout=10, dbs=None):
        """
        :param name: parser name; also used to build the logger name.
        :param workspace: directory where log files are written.
        :param options: opaque options object kept for subclasses.
        :param msg_que: optional message queue kept for subclasses.
        :param stream: if True, mirror log output to the console.
        :param debug: if True, enable debug-level logging.
        :param timeout: per-request HTTP timeout in seconds.
        :param dbs: mapping of db name -> connection; each name listed in
            ``use_db`` is copied into ``self.dbs_conn`` (None when missing).
        """
        self.name = name
        self.workspace = workspace
        self.options = options
        self.msg_que = msg_que
        self.dbs_conn = {}
        self.timeout = timeout
        if dbs:
            for db_name in self.use_db:
                # dict.get already returns None for missing keys, which is
                # exactly what the previous if/else spelled out by hand.
                self.dbs_conn[db_name] = dbs.get(db_name)
        self.log, self.log_sh, _ = init_logger(name='parser.%s' % (name,), sub_name=name, workspace=self.workspace,
                                               multiproc=True, stream=stream, debug=debug)

    def start(self, args):
        """Entry point: resolve the START_ARGS sentinel, then dispatch to run()."""
        if args == START_ARGS:
            args = self.START_ARGS
        self.run(*args)

    @abc.abstractmethod
    def run(self, *args):
        """Subclass hook performing the actual parsing work."""
        pass

    def http_get(self, url, headers=None, proxy=None, modify=None):
        """GET *url*, decode it with the page's declared charset, parse as HTML.

        :param url: URL to fetch.
        :param headers: extra headers merged over the defaults.
        :param proxy: optional proxy URL, applied to both http and https.
        :param modify: optional callable applied to the decoded text before
            it is handed to lxml.
        :returns: tuple ``(lxml HTML element, requests.Response)``.
        :raises requests.exceptions.RequestsWarning: when the status is not 200.
        """
        self.log.info('http_get|url->%r|proxy->%s|headers->%r', url, proxy if proxy else None, headers)
        proxies = {'http': proxy, 'https': proxy} if proxy else None
        tm = time.time()
        try:
            default_headers = {
                'Connection': 'close',
                'User-Agent': get_user_agent()
            }
            if headers:
                default_headers.update(headers)

            with requests.Session() as sess:
                sess.keep_alive = False
                rsp = sess.get(url, headers=default_headers, proxies=proxies, timeout=self.timeout)
                if rsp.status_code != 200:
                    raise requests.exceptions.RequestsWarning('http get code(%d) is not 200' % (rsp.status_code,))

                # rsp.content is bytes; decode leniently so the str-pattern
                # regex works on Python 3 as well as Python 2 (the charset
                # token in a <meta> tag is always ASCII anyway).
                e = re_encoding.search(rsp.content.decode('ascii', 'ignore'))
                if e:
                    encoding = e.group('encoding').strip()
                else:
                    # Fall back to charset detection; may be None.
                    encoding = rsp.apparent_encoding

                # GBK is a superset of GB2312 and pages labelled GB2312 often
                # contain GBK-only characters. Compare case-insensitively so
                # lowercase 'gb2312' declarations are upgraded too.
                if encoding and encoding.upper() == 'GB2312':
                    encoding = 'GBK'

                rsp.encoding = encoding
                text = rsp.text
                if modify is not None:
                    text = modify(text)
                ret = etree.HTML(text)
        except Exception:
            # Log the failure with traceback, then re-raise unchanged; a bare
            # raise preserves the original traceback on Python 2 as well
            # (``raise e`` would discard it there).
            self.log.exception('http_get failed|url->%r', url)
            raise

        self.log.info('http_get|duration->%g', time.time() - tm)
        return ret, rsp
