#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
爬取国家统计局最新地址库
省市区三级（Json版本）
author: xiexie1993
time: 2020-07-22
"""

import requests
from bs4 import BeautifulSoup
import json
import os
import datetime
import time


g_province_count = 0
g_city_count = 0
g_area_count = 0

g_province_lists = []
g_city_lists = {}
g_area_lists = {}

g_error_count =0

def manual_get_province_city_area():
    """Crawl city/area data for every province using a hard-coded code list.

    The National Bureau of Statistics province pages are named
    "<2-digit code>.html"; the full 6-digit region code pads with "0000".
    Delegates the actual crawling to get_city() for each province.
    """
    global g_province_count
    global g_city_count
    global g_area_count
    # Two-digit prefixes of all 31 mainland province-level divisions.
    province_codes = [
        11, 12, 13, 14, 15,
        21, 22, 23,
        31, 32, 33, 34, 35, 36, 37,
        41, 42, 43, 44, 45, 46,
        50, 51, 52, 53, 54,
        61, 62, 63, 64, 65,
    ]
    print("[INFO] province_lists =", province_codes)
    for code in province_codes:
        print("[INFO] province =", code)
        g_province_count += 1
        print("抓取统计： %s 省， %s 市， %s 个区" % (g_province_count, g_city_count, g_area_count))
        # Descend into the province's city listing page.
        get_city(str(code) + ".html", str(code) + "0000")


def get_province(index_href):
    """Crawl the province index page and recurse into each province's cities.

    Fetches ``url + index_href``, parses the ``.provincetr a`` anchors,
    appends ``{'text': name, 'value': code}`` entries to the module-global
    ``g_province_lists``, and calls :func:`get_city` for every province.

    :param index_href: relative href of the index page, e.g. ``'index.html'``.
    """
    global g_province_count
    global g_city_count
    global g_area_count
    province_url = url + index_href
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'
    }
    request = requests.get(province_url, headers=headers)
    # The NBS site serves GBK-encoded pages; decode accordingly.
    request.encoding = 'gbk'
    province_html_text = str(request.text)
    soup = BeautifulSoup(province_html_text, "html.parser")
    province_tr_list = soup.select('.provincetr a')
    province_key = 0

    # Walk every province link on the index page.
    for province_tr in province_tr_list:
        if province_tr:
            province_href = province_tr.attrs['href']
            # href is "<2-digit code>.html"; pad to the 6-digit region code.
            province_no = province_href.split('.')[0]
            province_code = province_no + '0000'
            province_name = province_tr.text
            province_info = {'text': province_name, 'value': province_code}
            g_province_lists.append(province_info)
            province_key += 1
            g_province_count = province_key
            print("[Debug] 开始抓取第%s个省:%s,编码：%s" %(province_key,province_name,province_code))
            print("抓取统计： %s 省， %s 市， %s 个区" %(g_province_count,g_city_count,g_area_count))
            # Descend into the province's city listing page.
            get_city(province_href, province_code)

    print("抓取统计： %s 省， %s 市， %s 个区" %(g_province_count,g_city_count,g_area_count))

def get_city(province_href, province_code):
    """Crawl the city (prefecture) level under one province.

    Fetches ``url + province_href``, parses the ``.citytr`` rows, stores the
    province's cities under ``g_city_lists[province_code]`` as a list of
    ``{'text': name, 'value': code}`` dicts, and calls :func:`get_area` for
    each city found.

    :param province_href: relative href of the province page, e.g. ``'11.html'``.
    :param province_code: 6-digit province code, e.g. ``'110000'``.
    """
    global g_province_count
    global g_city_count
    global g_area_count
    print('[INFO]开始抓取编码为%s的省下的市级信息' %(province_code))
    city_url = url + province_href
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'
    }
    request = requests.get(city_url, headers=headers)
    # The NBS site serves GBK-encoded pages; decode accordingly.
    request.encoding = 'gbk'
    city_html_text = str(request.text)
    soup = BeautifulSoup(city_html_text, "html.parser")
    city_tr_list = soup.select('.citytr')
    city_list = []
    # Each .citytr row has two <a> cells: [0] the 12-digit code, [1] the name.
    for city_tr in city_tr_list:
        if city_tr:
            city_a_info = city_tr.select('a')
            city_href = city_a_info[0].attrs['href']
            # Keep only the first 6 digits of the statistical code.
            city_code = city_a_info[0].text[:6]
            city_name = city_a_info[1].text
            city_info = {'text': city_name,'value': city_code, }
            city_list.append(city_info)
            g_city_count = g_city_count + 1
            print('[INFO]抓取市级信息：', city_info)
            print("抓取统计： %s 省， %s 市， %s 个区" %(g_province_count,g_city_count,g_area_count))
            # Descend into the city's county/area listing page.
            get_area(city_href, city_code, city_name)
    g_city_lists[province_code]=city_list
    print("抓取统计： %s 省， %s 市， %s 个区" %(g_province_count,g_city_count,g_area_count))

def get_area(city_href, city_code, city_name):
    """Crawl the county/district level under one city, with bounded retries.

    Fetches ``url + city_href``, parses the ``.countytr`` rows and stores
    them under ``g_area_lists[city_code]``.  When a page comes back empty
    (the NBS site throttles aggressively), it retries up to 10 times with a
    1.5 s pause, then appends the failing city to a per-run error log.

    :param city_href: relative href of the city page, e.g. ``'11/1101.html'``.
    :param city_code: 6-digit city code, e.g. ``'110100'``.
    :param city_name: human-readable city name (used for logging only).
    """
    global g_province_count
    global g_city_count
    global g_area_count
    global g_error_count
    global g_nowTime
    area_url = url + city_href
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'
    }
    request = requests.get(area_url, headers=headers)
    # The NBS site serves GBK-encoded pages; decode accordingly.
    request.encoding = 'gbk'
    area_html_text = str(request.text)
    soup = BeautifulSoup(area_html_text, "html.parser")
    area_tr_list = soup.select('.countytr')
    area_list = []
    # Each .countytr row has two <td> cells: [0] the 12-digit code, [1] the name.
    for area_tr in area_tr_list:
        area_a_info = area_tr.select('td')
        if area_a_info:
            # Keep only the first 6 digits of the statistical code.
            area_code = area_a_info[0].text[:6]
            area_name = area_a_info[1].text
            area_info = {'text': area_name, 'value': area_code}
            area_list.append(area_info)
            g_area_count = g_area_count + 1
            print('[INFO]抓取到该市区下的区级数据：', area_info)
            print("抓取统计： %s 省， %s 市， %s 个区" %(g_province_count,g_city_count,g_area_count))
    g_area_lists[city_code]=area_list
    if (len(area_list)==0 and g_error_count < 10 ):
        g_error_count +=1
        print("[INFO] -------------- 该页没抓取到%s （%s）下的区信息，重新读取----------尝试次数： %s -------" %(city_name, city_code, g_error_count))
        print("抓取统计： %s 省， %s 市， %s 个区" %(g_province_count,g_city_count,g_area_count))
        # Back off briefly before retrying (server-side throttling).
        time.sleep(1.5)
        get_area(city_href, city_code, city_name)
    else:
        if g_error_count ==10:
            # Retry budget exhausted: record the failing city for later review.
            print("[INFO] -------------- 该页没抓取到%s （%s）下的区信息，记录日志----------尝试次数： %s -------" %(city_name, city_code, g_error_count))
            filename = "json2019/error_log_" + str(g_nowTime) + ".txt"
            # Context manager guarantees the log handle is closed even if
            # the write fails mid-way.
            with open(filename, 'a+', encoding='utf-8') as file:
                file.write(city_name + " --- " + city_code + "\n")
        else:
            print("[INFO] 抓取到了")
        g_error_count = 0
    print("抓取统计： %s 省， %s 市， %s 个区" %(g_province_count,g_city_count,g_area_count))

# 程序主入口
if __name__ == "__main__":
    url = 'http://www.stats.gov.cn/tjsj/tjbz/tjyqhdmhcxhfdm/2019/'
    # 创建json目录
    json_folder = 'json2019/'
    if not os.path.exists(json_folder):
        os.makedirs(json_folder)
    print('[INFO]开始…')

    #  nowTime = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
    g_nowTime = datetime.datetime.now().strftime('%Y%m%d%H%M%S')

    manual_get_province_city_area()
    #  get_province('index.html')

    print("统计： %s 省， %s 市， %s 个区" %(g_province_count,g_city_count,g_area_count))


    filename = "json2019/province" + str(g_nowTime) + ".json"
    print('[Debug] filename=',filename)
    file = open(filename, 'w', encoding='utf-8')
    json.dump(g_province_lists, file, skipkeys=False,ensure_ascii=False,indent=4)
    file.close()

    filename = "json2019/city" + str(g_nowTime) + ".json"
    print('[Debug] filename=',filename)
    file1 = open(filename, 'w', encoding='utf-8')
    json.dump(g_city_lists, file1, skipkeys=False,ensure_ascii=False,indent=4)
    file1.close()

    filename = "json2019/area" + str(g_nowTime) + ".json"
    print('[Debug] filename=',filename)
    file2 = open(filename, 'w', encoding='utf-8')
    json.dump(g_area_lists, file2, skipkeys=False,ensure_ascii=False,indent=4)
    file2.close()

    print('[INFO]数据写入完成！')
