import json
import logging
import os
from urllib.parse import urlsplit

import requests


def getHTMLText(url):
    """Fetch *url* and return the response body as text.

    Returns '' on any request failure (connection error, timeout,
    non-2xx status) so callers can treat the empty string as "fetch failed".
    """
    try:
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        # Let requests guess the real charset from the body; the server's
        # declared encoding is often missing or wrong for this endpoint.
        r.encoding = r.apparent_encoding
        return r.text
    except requests.RequestException:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit and hid programming errors.
        return ""


def writeFile(start_url, content):
    """Write *content* to data/<basename of the URL path>.

    The file name is taken from the last path segment of *start_url*
    (e.g. .../100000_province.json -> data/100000_province.json).
    The data/ directory is created on demand.
    """
    name = os.path.basename(urlsplit(start_url).path)
    # exist_ok avoids the check-then-create race of the original
    # `if not exists: makedirs` pattern.
    os.makedirs('data', exist_ok=True)
    with open(os.path.join('data', name), 'w', encoding='utf-8') as f:
        f.write(content)


def getProvinceOrCity(start_url):
    """Download one boundary GeoJSON file and recurse into its sub-areas.

    Fetches *start_url*, saves the body via writeFile, then walks the
    'features' list and recurses into each area's `<adcode>_full.json`
    URL. Stops descending when a district-level feature is seen or when
    the constructed URL equals *start_url* (guards against re-fetching
    the same area forever).
    """
    html = getHTMLText(start_url)
    if html == '':
        # Fetch failed; log and give up on this branch.
        logging.info("url get error,url is:%s", start_url)
        return

    writeFile(start_url, html)
    all_province = json.loads(html)
    data = all_province['features']
    for province in data:
        # NOTE(review): `break` assumes all features in one file share the
        # same level, so the first district ends the whole loop — confirm.
        if province['properties']["level"] == "district":
            break
        # Lazy %s args: the message is only formatted if INFO is enabled.
        logging.info('current area is:%s', province['properties']['name'])
        url = 'https://geo.datav.aliyun.com/areas/bound/' + str(province['properties']['adcode']) + '_full.json'
        if url == start_url:
            logging.info('current get is repeat...')
            break
        logging.info('url is:%s', url)
        getProvinceOrCity(url)


def get(start_url):
    """Fetch the top-level province index and crawl each province.

    Downloads *start_url* (expected JSON with a 'rows' list of
    {name, adcode} entries), saves it, then kicks off a recursive
    getProvinceOrCity crawl for each province's `<adcode>_full.json`.
    """
    html = getHTMLText(start_url)
    if html == '':
        # Guard added for consistency with getProvinceOrCity: without it,
        # a failed fetch made json.loads('') raise JSONDecodeError.
        logging.info("url get error,url is:%s", start_url)
        return
    writeFile(start_url, html)
    all_province = json.loads(html)
    data = all_province['rows']
    for province in data:
        logging.info('current area is:%s', province['name'])
        url = 'https://geo.datav.aliyun.com/areas/bound/' + str(province['adcode']) + '_full.json'
        getProvinceOrCity(url)


def main():
    """Entry point: crawl the whole tree starting at the national province index."""
    get('https://geo.datav.aliyun.com/areas/csv/100000_province.json')


if __name__ == "__main__":
    # Configure the root logger and start the crawl only when run as a
    # script; importing this module no longer triggers a network crawl.
    logging.basicConfig(level=logging.INFO)
    main()
