from urllib.parse import urljoin

import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent

# Site root of the China Weather portal; province hrefs are resolved against it.
prefix = "https://www.weather.com.cn/"

# Region-overview pages, one per large area of China:
# hb=North, db=Northeast, hd=East, hz=Central, hn=South,
# xb=Northwest, xn=Southwest, gat=Hong Kong/Macau/Taiwan.
_REGIONS = ("hb", "db", "hd", "hz", "hn", "xb", "xn", "gat")
url = [f"{prefix}textFC/{region}.shtml" for region in _REGIONS]

# Local service endpoint that persists the scraped region codes.
save_code_url = "http://127.0.0.1:8001/regionCode/save"

# Parse one province's detail page (common layout shared by most provinces).
def parseCommonCode(provinceNode):
    """Fetch a province page and extract its county weather-station codes.

    Args:
        provinceNode: the province's ``<a>`` tag — its text is the province
            name and its ``href`` points at the province detail page.

    Returns:
        ``{province: {city: {county: code}}}`` where ``code`` is the numeric
        station id taken from the county link's filename
        (e.g. ``.../101010100.shtml`` -> ``"101010100"``).
    """
    provinceMap = {}
    cityMap = {}

    province = provinceNode.text
    # urljoin resolves relative hrefs correctly (plain concatenation produced
    # a double slash) and also copes with already-absolute hrefs.
    new_url = urljoin(prefix, provinceNode["href"])
    headers = {
        "user-agent": UserAgent().random
    }
    # Context manager closes the connection deterministically instead of
    # leaking it (the original never closed this response).
    with requests.get(new_url, headers=headers) as response:
        response.encoding = "utf-8"
        html = BeautifulSoup(response.text, "html.parser")

    div = html.find("div", {"class": "hanml"}).find("div", {"class": "conMidtab"})
    cityDivs = div.find_all("div", {"class": "conMidtab3"})

    for cityDiv in cityDivs:
        # One conMidtab3 table holds one city's counties, one county per row.
        trs = cityDiv.find("table").find_all("tr")
        countyMap = {}
        city = ""
        for i, tr in enumerate(trs):
            tds = tr.find_all("td")
            if i == 0:
                # First row carries the city name in td[0] (rowspan cell)
                # and the first county link in td[1].
                city = tds[0].text
                countyLink = tds[1].find("a")
            else:
                countyLink = tds[0].find("a")
            county = countyLink.text
            countyPath = countyLink["href"]
            # ".../weather/101010100.shtml" -> "101010100"
            countyCode = countyPath.split("/")[-1].split(".")[0]
            countyMap[county] = countyCode
        cityMap[city] = countyMap
    provinceMap[province] = cityMap
    return provinceMap


if __name__ == '__main__':
    # Crawl every region-overview page, parse each province it lists, and
    # POST the resulting {province: {city: {county: code}}} map to the
    # local save endpoint.
    for page_url in url:
        headers = {
            "user-agent": UserAgent().random
        }
        response = requests.get(page_url, headers=headers)
        try:
            response.encoding = "utf-8"

            html = BeautifulSoup(response.text, "html.parser")
            div = html.find("div", {"class": "hanml"}).find("div", {"class": "conMidtab"})
            if page_url != "https://www.weather.com.cn/textFC/gat.shtml":
                # Ordinary region pages: one conMidtab2 div per province;
                # the province link sits in the third row of its table.
                for tabDiv in div.find_all("div", {"class": "conMidtab2"}):
                    provinceNode = tabDiv.find("table").find_all("tr")[2].find("a")
                    result = parseCommonCode(provinceNode)
                    requests.post(save_code_url, json=result)
            else:
                # HK / Macau / Taiwan page: the three region tables are nested
                # one inside the next. Descend one table per region instead of
                # repeating the same four-line stanza three times; each step
                # reads row index 2 (identical to the original [2:3][0] /
                # [2:][0] slicing).
                gatTable = div.find("div", {"class": "conMidtab2"}).find("table")
                for _ in range(3):
                    provinceNode = gatTable.find_all("tr")[2].find_all("td")[0].find("a")
                    result = parseCommonCode(provinceNode)
                    requests.post(save_code_url, json=result)
                    gatTable = gatTable.find("table")
        finally:
            # Release the connection even when parsing or posting raises
            # (the original leaked it on any exception).
            response.close()