import csv
import time

import requests

from bs4 import BeautifulSoup


def requestUrl(url):
    """Fetch *url* with a browser User-Agent and return the decoded body.

    Bug fix: the original called ``requests.get(url, "lxml", ...)`` — the
    second positional argument of ``requests.get`` is ``params``, so every
    request went out with a stray ``?lxml`` query string appended. "lxml"
    is a BeautifulSoup parser name, not a requests option, and is dropped.
    """
    resp = requests.get(url, headers={
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0"})
    # Use the encoding sniffed from the payload so Chinese pages decode correctly.
    resp.encoding = resp.apparent_encoding
    return resp.text


def get_City() -> list:
    """Scrape the city index page and return a list of (name, id) tuples.

    The id is the first non-empty path segment of each anchor's href,
    e.g. "/beijing/index.html" -> "beijing".

    Returns:
        list[tuple[str, str]]: (city display name, city url id) pairs.
    """
    text = requestUrl("https://lishi.tianqi.com/")
    soup = BeautifulSoup(text, "lxml")

    ret = []
    for ul in soup.find_all("ul", class_="table_list"):
        for a in ul.find_all("a"):
            href = a.get("href")
            if not href:
                # Robustness: anchors without an href crashed the original
                # (AttributeError on None.split); skip them instead.
                continue
            parts = href.split("/")
            # Absolute paths like "/beijing/..." split to ['', 'beijing', ...].
            city_id = parts[0] if parts[0] else parts[1]
            ret.append((a.get_text(), city_id))

    print(ret)
    return ret


def write_City(cityList: list):
    """Append a header row plus the given (name, id) rows to dataset/city.csv.

    NOTE(review): mode "a" re-appends the header on every call; this mirrors
    the original behavior — confirm whether "w" (overwrite) was intended.
    """
    with open("dataset/city.csv", "a", encoding="utf-8", newline="") as f:
        csv_writer = csv.writer(f)
        csv_writer.writerow(['name', 'id'])
        csv_writer.writerows(cityList)
    # The context manager closes the file; the explicit close() was redundant.
    print("写入数据成功")


def read_City() -> list:
    """Read dataset/city.csv and return its rows with the header stripped.

    Returns:
        list[list[str]]: every data row as a [name, id] list.
    """
    with open("dataset/city.csv", "r", encoding="utf-8") as f:
        # The with-block closes the file; the explicit close() was redundant.
        rows = list(csv.reader(f))
    # Drop the ['name', 'id'] header line.
    return rows[1:]


def create_Weather_CSV():
    """Create (or truncate) dataset/weather.csv containing only the header row."""
    header = ['name', 'date', 'maxTemp', 'minTemp', 'weather', 'direction']
    # Plain string path (the f-string had no placeholders); the with-block
    # closes the file, so the explicit close() was redundant.
    with open("dataset/weather.csv", "w", encoding="utf-8", newline="") as f:
        csv.writer(f).writerow(header)


def get_Start_Index() -> int:
    """Return the resume index stored in the first cell of dataset/start.csv.

    Returns 0 when the file is empty (or its first row has no cells —
    the original raised IndexError on a blank first row).
    """
    with open("dataset/start.csv", "r", encoding="utf-8") as f:
        # next() with a default replaces the manual for/break loop;
        # the with-block closes the file (explicit close() was redundant).
        row = next(csv.reader(f), None)
    return int(row[0]) if row else 0


def plus_Start_Index():
    """Increment the persisted resume index in dataset/start.csv by one."""
    start = get_Start_Index()
    # newline="" is required for the csv module — without it the writer
    # emits extra blank lines on Windows. The f-string path had no
    # placeholders and the explicit close() inside `with` was redundant.
    with open("dataset/start.csv", "w", encoding="utf-8", newline="") as f:
        csv.writer(f).writerow([start + 1])


def get_Weather():
    """Crawl the 2023 monthly history pages for each remaining city and
    append the rows to dataset/weather.csv.

    Each row is [city_name, date, max_temp, min_temp, weather, wind_dir]
    taken from the five <div>s inside each <li> of the "thrui" list.
    Progress is checkpointed via plus_Start_Index() after every city so an
    interrupted run resumes where it left off.
    """
    cities = read_City()
    start = get_Start_Index()
    cities = cities[start:]
    print(f"即将开始：{cities[0][0]}， 序号：{start}")
    for city in cities:
        rows = []
        start_time = time.time()
        for month in range(1, 13):
            time.sleep(0.5)  # throttle: be polite to the server
            # {month:02d} produces the same '01'..'12' segments as before.
            url = f"https://lishi.tianqi.com/{city[1]}/2023{month:02d}.html"
            soup = BeautifulSoup(requestUrl(url), "lxml")
            # The site serves an error page for unknown city/month combos.
            if soup.find("div", class_="error") is not None:
                continue
            ul = soup.find("ul", class_="thrui")
            if ul is None:
                # Guard against missing data table / layout change —
                # the original crashed here with AttributeError.
                continue
            for li in ul.find_all("li"):
                div = li.find_all("div")
                rows.append([
                    city[0],
                    div[0].get_text(),
                    div[1].get_text(),
                    div[2].get_text(),
                    div[3].get_text(),
                    div[4].get_text(),
                ])

        # Append this city's year of data in one write; the with-block
        # closes the file (the explicit close() was redundant).
        with open("dataset/weather.csv", "a", encoding="utf-8", newline="") as f:
            csv.writer(f).writerows(rows)
        print(f"写入 {city[0]} 数据成功, 耗时{time.time() - start_time}s")
        plus_Start_Index()


if __name__ == '__main__':
    # One-time setup helpers — run once, then leave disabled:
    # city_list = get_City()        # scrape the city index
    # write_City(city_list)         # persist it to dataset/city.csv
    # print(read_City())            # sanity-check the saved list
    # create_Weather_CSV()          # initialize the output file header
    get_Weather()
