# import logging
# from test_code import fun
# from functools import wraps
# class setLog:
#     @classmethod
#     def request(cls, retry=6):
#         def decorate(func):
#             @wraps(func)
#             def wrapper(*args, **kwargs):
#                 for _ in range(retry):
#                     try:
#                         return func(*args, **kwargs)
#                     except AssertionError as e:
#                         log.warning('AssertionError %s'%(args if args else kwargs))
#                     except TypeError as e:
#                         log.warning(e)
#                     except Exception as e:
#                         log.exception(e)
#                 else:
#                     log.error('request fatil')
#             return wrapper
#         return decorate



# log = logging.getLogger(__name__)
# ft = logging.Formatter('%(asctime)s - %(name)s - %(funcName)s - %(levelname)s - %(message)s','%Y-%m-%d %H:%M:%S')

# fh = logging.FileHandler('spider.log')
# fh.setFormatter(ft)
# fh.setLevel(logging.WARNING)

# sh = logging.StreamHandler()
# sh.setFormatter(ft)
# sh.setLevel(logging.DEBUG)

# # log.addHandler(fh)
# log.addHandler(sh)

# log.setLevel(logging.DEBUG)
# log.debug('hello')

# # def hh():

# #     log.debug('hello')

# # hh()
# # fun()

# @setLog.request()
# def test(i):
#   b'Hello %b!' % i
# test(2)

# from collections import defaultdict
# # import collections
# # s = [('yellow', 1), ('blue', 2), ('yellow', 3), ('blue', 4), ('red', 1)]

# d = defaultdict(lambda:)
# # d.setdefault(9)
# d['a']+=1

# print(d['a'])

# print(dt['a'])
# import time
# from tqdm import tqdm

# a = [1,2,3,4]
# for i in tqdm(a,desc='hello'):
#   time.sleep(1)
#   pass

import requests
from bs4 import BeautifulSoup
from pprint import pprint
from config import get_header
import urllib

def format_html(html):
    """Parse a Sina Finance manager-info HTML page into a dict.

    Args:
        html: page source as a string (GBK-decoded upstream).

    Returns:
        dict with keys 'name', 'sex', 'birth', 'edu', 'nation', 'des'
        and 'post_his' (a list of per-position dicts with keys
        'com_name', 'post', 'start', 'end', 'reward').
    """
    soup = BeautifulSoup(html, 'lxml')
    ret_dic = {}

    trs = soup.select('#Table1 tr')

    # Second row of Table1 holds the personal-info cells in a fixed order.
    td = trs[1].select('td')
    for key, cell in zip(('name', 'sex', 'birth', 'edu', 'nation'), td):
        ret_dic[key] = cell.text

    # Third row, second cell: free-text biography.
    ret_dic['des'] = trs[2].select('td')[1].get_text(strip=True)

    ret_lis = []
    for tr in soup.select('#Table3 tr'):
        tds = tr.select('td')
        # Only 5-cell rows are data rows; skip headers/separators.
        if len(tds) != 5:
            continue
        ret_lis.append({
            'com_name': tds[0].text,
            'post': tds[1].text,
            'start': tds[2].text,
            # '--' marks a position still held; use a far-future sentinel.
            'end': '2999-01-01' if tds[3].text == '--' else tds[3].text,
            'reward': tds[4].get_text(strip=True) or None,
        })
    ret_dic['post_his'] = ret_lis

    pprint(ret_dic)
    # Bug fix: the parsed result was previously discarded after printing.
    return ret_dic
def test():
    """Read the locally cached page and feed it through format_html.

    The live-fetch code (requests session + GBK URL quoting) was moved to
    comments upstream; this now works purely off the cached file.
    """
    with open('test/sbl.html', 'rt') as cache:
        page = cache.read()
    format_html(page)

# Guard the entry point so importing this module does not trigger the run.
if __name__ == '__main__':
    test()