# -*- coding:utf-8 -*-
# @FileName  :locations.py
# @Time      :2024/2/4 9:51
import datetime
import time

import requests
from lxml import etree

from spider.pg_utils.pg_connect import get_connect

base_url = "https://www.stats.gov.cn/sj/tjbz/tjyqhdmhcxhfdm/2023/"


# 省
# Province level
def get_province_data():
    """Scrape the province list from the NBS division-code index page.

    Each province is persisted via ``data_to_db`` and returned as a dict
    with ``url``/``name``/``code``/``full_name``/``level`` keys.
    """
    response = requests.get(base_url)
    response.encoding = "utf-8"

    doc = etree.HTML(response.text)
    provinces = []
    for anchor in doc.xpath("//tr[@class='provincetr']/td/a"):
        href = anchor.xpath("./@href")[0]
        label = anchor.xpath("./text()")[0]
        provinces.append({
            "url": base_url + href,
            "name": label,
            # href looks like "<code>.html"; the stem is the province code
            "code": href.split(".")[0],
            "full_name": label,
            "level": "province",
        })

    data_to_db(provinces)
    return provinces


# 市
# City level
def get_city_data():
    """Scrape the city rows of every province page and persist them.

    Returns the accumulated list of city dicts across all provinces.
    """
    cities = []
    for province in get_province_data():
        page_url = province.get("url")
        if not page_url:
            continue

        response = requests.get(page_url)
        response.encoding = "utf-8"
        doc = etree.HTML(response.text)

        rows = []
        for row in doc.xpath("//tr[@class='citytr']"):
            code = row.xpath("./td[1]/a/text()")[0]
            name = row.xpath("./td[2]/a/text()")[0]
            # Some rows carry no link to a child page; store "" so the
            # county pass can skip them.
            hrefs = row.xpath("./td[2]/a/@href")
            child_url = base_url + hrefs[0] if hrefs else ""

            rows.append({
                "url": child_url,
                "name": name,
                "code": code,
                "full_name": province.get("full_name") + "-" + name,
                "level": "city",
            })

        data_to_db(rows)
        cities.extend(rows)

    return cities


# 县
# County level
def get_county_data():
    """Scrape the county rows of every city page and persist them.

    Cells without an ``<a>`` tag (areas with no deeper subdivision) fall
    back to the plain cell text.
    """
    counties = []
    for city in get_city_data():
        page_url = city.get("url")
        if not page_url:
            continue

        response = requests.get(page_url)
        response.encoding = "utf-8"
        doc = etree.HTML(response.text)

        rows = []
        for row in doc.xpath("//tr[@class='countytr']"):
            code = (row.xpath("./td[1]/a/text()") or row.xpath("./td[1]/text()"))[0]
            name = (row.xpath("./td[2]/a/text()") or row.xpath("./td[2]/text()"))[0]

            hrefs = row.xpath("./td[2]/a/@href")
            if hrefs:
                # County pages live under a per-province directory named
                # by the first two digits of the parent city code.
                child_url = base_url + f"{city.get('code')[:2]}/" + hrefs[0]
            else:
                child_url = ""

            rows.append({
                "url": child_url,
                "name": name,
                "code": code,
                "full_name": city.get("full_name") + "-" + name,
                "level": "county",
            })

        data_to_db(rows)
        counties.extend(rows)

    return counties


# 镇
# Town level
def get_town_data():
    """Scrape the town rows of every county page and persist them.

    Returns the accumulated list of town dicts.  Fix: rows whose cells
    carry no ``<a>`` tag previously raised an unhandled IndexError; they
    now fall back to the plain cell text, consistent with how
    ``get_county_data`` handles the same case.
    """
    town_data = []
    for item in get_county_data():
        url = item.get("url")
        if not url:
            continue

        resp = requests.get(url)
        resp.encoding = "utf-8"
        html = etree.HTML(resp.text)
        data = []

        for td in html.xpath("//tr[@class='towntr']"):
            try:
                code = td.xpath("./td[1]/a/text()")[0]
            except IndexError:
                code = td.xpath("./td[1]/text()")[0]
            try:
                name = td.xpath("./td[2]/a/text()")[0]
            except IndexError:
                name = td.xpath("./td[2]/text()")[0]
            try:
                # Town pages are siblings of the county page: drop the
                # trailing "XXXXXX.html" (11 chars) from the county URL
                # and append the relative href.
                town_url = url[:-11] + td.xpath("./td[2]/a/@href")[0]
            except IndexError:
                town_url = ""

            data.append({
                "url": town_url,
                "name": name,
                "code": code,
                "full_name": item.get("full_name") + "-" + name,
                "level": "town",
            })

        data_to_db(data)
        town_data.extend(data)

    return town_data


# 村
# Village level
def get_village_data():
    """Scrape the village rows — the leaf level — of every town page.

    Village rows are plain text (no links); besides the division code
    they carry an urban/rural category code in the second column.
    """
    villages = []
    for town in get_town_data():
        page_url = town.get("url")
        if not page_url:
            continue

        response = requests.get(page_url)
        response.encoding = "utf-8"
        doc = etree.HTML(response.text)

        rows = []
        for tr in doc.xpath("//tr[@class='villagetr']"):
            code = tr.xpath("./td[1]/text()")[0]
            category = tr.xpath("./td[2]/text()")[0]
            name = tr.xpath("./td[3]/text()")[0]
            rows.append({
                "url": "",
                "name": name,
                "code": code,  # statistical division code
                "category_code": category,  # urban/rural category code
                "full_name": town.get("full_name") + "-" + name,
                "level": "village",
            })

        data_to_db(rows)
        villages.extend(rows)

    return villages


def collect_location_data():
    """Entry point: crawl the full division hierarchy.

    ``get_village_data`` transitively drives every higher level
    (town -> county -> city -> province), so one call covers all.
    """
    get_village_data()


def data_to_db(data):
    """Persist scraped division records to the ``locations`` table.

    Runs in update-only mode: for each record the existing row (keyed by
    ``code``) gets its ``full_name`` and ``level`` refreshed.

    Fixes over the previous version:
    - SQL was built with f-strings (injectable, and broke on values
      containing quotes); now uses parameterized queries.
    - Removed the insert branch that was unreachable (dead code after an
      unconditional ``continue``) and referenced undefined names
      (``insert_time``/``status``) and a connection that had already
      been closed.
    - The connection is now released in ``finally`` and committed once.

    :param data: list of dicts with ``full_name``/``level``/``code`` keys.
    """
    conn, cur = get_connect()
    try:
        for record in data:
            cur.execute(
                "UPDATE locations SET full_name=%s, level=%s WHERE code=%s;",
                (record.get("full_name"), record.get("level"), record.get("code")),
            )
        conn.commit()
    finally:
        conn.close()

    print(f"更新完成 {len(data)}")


if __name__ == "__main__":
    collect_location_data()
