import copy
from bs4 import BeautifulSoup
from dbpool import get_connection_exception
from utils import replace_str, select_use_db_conf


# Field-source types understood by HtmlParser configuration entries.
HTML_TYPE = "html"
DB_TYPE = "database"
# All supported field-source types.
ALL_TYPE = [HTML_TYPE, DB_TYPE]

class HtmlParser(object):
    """Parse a crawled HTML page into structured field data.

    Extraction is driven by two configuration mappings:

    - ``parse_html_conf``: ``{field_name: conf}`` where ``conf['type']``
      is ``HTML_TYPE`` (extract from the page via BeautifulSoup) or
      ``DB_TYPE`` (look the value up through ``select_use_db_conf``).
    - ``parse_url_conf``: configuration for follow-up URLs; the base
      ``get_parse_url`` finds none — subclasses are expected to override.
    """

    def __init__(self, parse_html_conf, parse_url_conf):
        # Keep pristine copies of both configs; parse() re-clones them on
        # every call because get_db_data() mutates its conf in place.
        self._html_conf = parse_html_conf
        self._url_conf = parse_url_conf
        self.parse_html_conf = copy.deepcopy(self._html_conf)
        self.parse_url_conf = copy.deepcopy(self._url_conf)

    def parse(self, url, html_cont):
        """Parse ``html_cont`` fetched from ``url``.

        Returns a ``(new_urls, parse_data)`` tuple, or ``None`` when
        ``html_cont`` is ``None`` or nothing was parsed.
        """
        # Work on fresh copies so per-page mutations (see get_db_data)
        # do not leak into the next parse() call.
        self.parse_html_conf = copy.deepcopy(self._html_conf)
        self.parse_url_conf = copy.deepcopy(self._url_conf)
        if html_cont is None:
            return
        soup = BeautifulSoup(html_cont, 'html.parser', from_encoding='utf-8')
        parse_data = dict()
        for field, conf in self.parse_html_conf.items():
            if conf['type'] == HTML_TYPE:
                parse_data[field] = self.get_parse_data(soup, conf)
            elif conf['type'] == DB_TYPE:
                parse_data[field] = self.get_db_data(url, conf)
        parse_data = self.value_map(parse_data)
        parse_url = self.get_parse_url(parse_data)

        if parse_data:
            return self.add_parse_url_handle(parse_url), self.add_parse_data_handle(parse_data)
        else:
            print('解析html错误：')

    def add_parse_data_handle(self, _data):
        """Post-processing hook for parsed data; subclasses may override."""
        return _data

    def add_parse_url_handle(self, _data):
        """Post-processing hook for parsed URLs; subclasses may override."""
        return _data

    def get_db_data(self, url, conf):
        """Resolve a DB_TYPE field by querying the database.

        NOTE: mutates ``conf['db_conf']['replace']`` in place; this is
        safe only because parse() deep-copies the config per page.
        """
        if conf['db_conf'].get('replace'):
            conf['db_conf']['replace'] = replace_str(conf['db_conf']['replace'], {'url': url})
        db_info_list = select_use_db_conf(conf['db_conf'])
        print('db_info_list:')
        print(db_info_list)
        value = [replace_str(replace_str(conf['value'], i), conf.get('replace')) for i in db_info_list]
        print('value:')
        print(value)
        return value

    def get_parse_data(self, soup, conf):
        """Extract an HTML_TYPE field from ``soup``.

        ``conf['target']`` selects the result shape:
        - ``p_str``: concatenated stripped text (str)
        - ``li_list``: list of stripped child texts
        - ``href_list``: list of child ``href`` attributes

        Returns '' when the configured node is not found.
        """
        res_data = list()
        this_soup = _get_soup(soup, conf['parse_conf'])
        if not this_soup:
            return ''

        if conf['target'] == 'p_str':
            if isinstance(this_soup, list):
                soup_text_list = [i.text.strip() for i in this_soup]
                return ''.join(soup_text_list)
            else:
                return this_soup.text.strip()
        elif conf['target'] == 'li_list':
            for child in this_soup:
                res_data.append(child.text.strip())
        elif conf['target'] == 'href_list':
            for child in this_soup:
                res_data.append(child.get('href'))
        else:
            raise Exception('return type error!')
        return res_data

    def get_parse_url(self, parse_data):
        """Return follow-up URLs; the base implementation finds none."""
        res_url = list()
        return res_url

    def value_map(self, parse_data):
        """Optionally remap parsed values via a per-field 'value_map' conf.

        Supported map sources: ``from_dict`` (inline mapping) and
        ``from_db`` (mapping built from a DB query keyed by
        ``key_field``/``value_field``). Values missing from the mapping
        become None via dict.get(). Fields without a ``value_map`` entry
        pass through unchanged.
        """
        for field in parse_data:
            vm = self.parse_html_conf[field].get('value_map')
            if not vm:
                continue
            if vm['type'] == 'from_dict':
                map_dict = vm['map_dict']
            elif vm['type'] == 'from_db':
                db_info_list = select_use_db_conf(vm['db_conf'])
                map_dict = {
                    i[vm['key_field']]: i[vm['value_field']]
                    for i in db_info_list}
            else:
                # BUGFIX: an unrecognized map type previously fell through
                # and raised NameError on map_dict; leave the field as-is.
                continue
            if isinstance(parse_data[field], list):
                parse_data[field] = [map_dict.get(i) for i in parse_data[field]]
            elif isinstance(parse_data[field], str):
                parse_data[field] = map_dict.get(parse_data[field])
        return parse_data


def _get_class_soup(soup, class_name, num_n=1):
    """
    获取soup里下一个class的Tag值
    """
    for index, bs in enumerate(soup.find_all(class_=class_name)):
        if index+1 < num_n:
            continue
        else:
            return bs

def _get_id_soup(soup, id_name, num_n=1):
    """
    获取soup里下一个id的Tag值
    """
    for index, bs in enumerate(soup.find_all(id=id_name)):
        if index+1 < num_n:
            continue
        else:
            return bs

def _get_next_soup(soup, tag, num_n=1):
    """
    获取soup里下一个tag的Tag值
    """
    n = 1
    for i in soup.next_siblings:
        if hasattr(i, 'name') and i.name == tag:
            if n == num_n:
                return i
            else:
                n += 1
    return None


def _get_inner_soup(soup, tag, num_n=1):
    """
    获取soup里面第num_n个Tag值
    """
    sb_list = list()
    if isinstance(soup, list):
        for s in soup:
            for sb in s.find_all(tag):
                if num_n == 0:
                    sb_list.append(sb)
                else:
                    raise Exception("num_n值必须为0")
    else:
        for index, sb in enumerate(soup.find_all(tag)):
            if num_n == 0:
                sb_list.append(sb)
            else:
                if index+1 == num_n:
                    return sb
    return sb_list


def _get_soup(soup, class_tag, level=1):
    _type = class_tag[level - 1][0]
    _tag = class_tag[level - 1][1]
    _note = class_tag[level - 1][2]
    if _type == 'class':
        bs = _get_class_soup(soup, _tag, int(_note or 1))
    elif _type == 'id':
        bs = _get_id_soup(soup, _tag, int(_note or 1))
    elif _type == 'next':
        bs = _get_next_soup(soup, _tag, int(_note))
    elif _type == 'inner':
        bs = _get_inner_soup(soup, _tag, int(_note))
    if level == len(class_tag):
        return bs
    else:
        return _get_soup(bs, class_tag, level + 1)
