import json
import logging
import string
import time

import requests
from bs4 import BeautifulSoup

# Landing page of the stats.gov.cn 2022 administrative-division code list.
base_url = 'http://www.stats.gov.cn/sj/tjbz/tjyqhdmhcxhfdm/2022/'
# Anti-scraping cookie (wzws_cid) harvested at runtime by new_document().
extra_cookie = ''
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')


def write_json(obj):
    """Serialize *obj* as pretty-printed JSON to ./area.json.

    ensure_ascii=False keeps the Chinese labels human-readable; an explicit
    UTF-8 encoding makes that safe regardless of the platform default codec.
    """
    # 'with' guarantees the handle is closed even if serialization raises.
    with open('./area.json', 'w', encoding='utf-8') as fp:
        json.dump(obj, fp, indent=2, ensure_ascii=False)


def new_document(url: str):
    """Fetch *url* and return it parsed as a BeautifulSoup document.

    Cookies are read fresh from ./cookie.json on every call; when the site's
    first anti-scraping layer answers with a 302 carrying a wzws_cid cookie,
    that cookie is stored in the module-level ``extra_cookie`` and the
    request is retried.
    """
    global extra_cookie
    # 'with' closes the cookie file even if the JSON is malformed.
    with open('./cookie.json', 'r', encoding='utf-8') as fp:
        cookie_json = json.load(fp)
    cookie = ''.join(f'{key}={value};' for key, value in cookie_json.items())
    headers = {
        'Host': 'www.stats.gov.cn',
        'Upgrade-Insecure-Requests': '1',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,'
                  'application/;v=b3;q=0.7',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/115.0.0.0 Safari/537.36 Edg/115.0.1901.203',
        'Cookie': cookie + extra_cookie
    }
    # allow_redirects=False: requests follows 302s by default, which would
    # hide the anti-scraping redirect this function needs to inspect.
    # A timeout prevents the crawl from hanging forever on a dead connection.
    response = requests.get(url, headers=headers, timeout=60, allow_redirects=False)
    response.encoding = 'utf-8'
    if response.status_code == 302:
        ck = response.headers.get('Set-Cookie', '')
        for kv in ck.split(';'):
            if 'wzws_cid' in kv:
                extra_cookie = kv
                logging.warning('触发第一层反爬机制，设置cookie')
                break
        return new_document(url)
    if response.status_code != 200:
        logging.error(f'请求失败（{response.status_code}）：{url}')
        # Back off, then retry instead of parsing the failed response body.
        time.sleep(5 * 60)
        return new_document(url)
    logging.info(f'Url-----{url}')
    return BeautifulSoup(response.text, 'html.parser')


def init_areas():
    """Crawl the landing page and return the full province tree.

    Returns None when the page is blocked by the JavaScript challenge,
    otherwise a list of {label, code, sort, children} dicts.
    """
    document = new_document(base_url)
    if 'Please enable JavaScript and refresh the page.' in document.get_text():
        logging.error(f'请求成功,但节点为空: {base_url}')
        return
    provinces = []
    for position, anchor in enumerate(document.select('tr.provincetr td a'), start=1):
        href = anchor.get('href')
        provinces.append({
            'label': anchor.get_text(),
            'code': href.replace('.html', ''),
            'sort': position,
            'children': load_children(href),
        })
    return provinces


def load_children(href: str):
    """Recursively fetch the sub-areas of the page at base_url + href.

    Returns a list of {label, code, sort[, children]} dicts, or None when
    *href* is blank. Retries the same page indefinitely when the table
    cannot be found (blocked or unparsable response).
    """
    children = []
    if not href.strip():
        logging.warning('空链接')
        return None
    document = new_document(base_url + href)
    # Each level's data table has a CSS class ending in 'table'.
    table = document.find('table', class_=lambda value: value and value.endswith('table'))
    if not table:
        if document.get_text().__contains__('Please enable JavaScript and refresh the page.'):
            logging.error(f'请求成功,但节点为空: {base_url + href}')
            # Blocked by the JS challenge: back off before retrying.
            time.sleep(10 * 60)
        else:
            logging.error(f'html解析失败: {base_url + href}')
        # Retry the same href; only a successful parse breaks the loop.
        return load_children(href)
    # Data rows likewise carry a class ending in 'tr'.
    tr_tags = table.find_all('tr', class_=lambda value: value and value.endswith('tr'))
    for i, tr_tag in enumerate(tr_tags):
        td_tag = tr_tag.find_all('td')
        area = {
            'label': '',
            'code': td_tag[0].get_text(),
            'sort': i + 1,
        }

        # Rows without a link are leaves: the last cell holds the name.
        if not td_tag[0].find('a'):
            area['label'] = td_tag[len(td_tag) - 1].get_text()
        else:
            area['label'] = td_tag[1].get_text()
            url = td_tag[1].contents[0].get('href')
            # Child hrefs are relative to the current page's directory,
            # so prepend everything up to (and including) the last '/'.
            prefix = href[0:href.rfind('/') + 1]
            if not url.startswith(prefix):
                url = prefix + url
            area['children'] = load_children(url)
        children.append(area)
    return children


def transfer(seconds):
    """Format a duration in seconds as a Chinese 'X小时Y分钟Z秒' string.

    Accepts ints or floats (e.g. a time.perf_counter() difference);
    fractional seconds are truncated so the output stays readable
    (avoids float '//' artifacts like '2.0分钟').
    """
    total = int(seconds)
    hours, remainder = divmod(total, 3600)
    # 'minutes' instead of the original 'min', which shadowed the builtin.
    minutes, secs = divmod(remainder, 60)
    res = ''
    if hours:
        res = f'{hours}小时'
    if minutes:
        res = f'{res}{minutes}分钟'
    return f'{res}{secs}秒'


if __name__ == '__main__':
    # Time the crawl and the JSON dump separately.
    crawl_start = time.perf_counter()
    areas = init_areas()
    crawl_elapsed = time.perf_counter() - crawl_start
    logging.info(f'执行完成，耗时：{transfer(crawl_elapsed)}')
    logging.info('开始写入json文件')
    write_start = time.perf_counter()
    write_json(areas)
    write_elapsed = time.perf_counter() - write_start
    logging.info(f'写入完成，耗时：{transfer(write_elapsed)}')
