import requests
from bs4 import BeautifulSoup
import csv

def fun(city, year):
    """Scrape daily air-quality tables for *city* in *year* from
    tianqihoubao.com and write them to "<city><year>.csv".

    Parameters:
        city: city slug as used in the site's URLs (e.g. "beijing").
        year: four-digit year (int or str).

    Writes one header row, then one CSV row per day with
    Time, Quality, AQI, PM2.5, PM10, SO2, NO2, CO, O3.

    Raises:
        requests.HTTPError: if any monthly page returns an error status.
    """
    # Name the output after the actual arguments; the original hard-coded
    # "北京2022.csv" in an f-string that contained no placeholders, so the
    # city/year parameters were silently ignored for the filename.
    with open(f"{city}{year}.csv", "w", newline="", encoding="utf-8") as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(["Time", "Quality", "AQI", "PM2.5", "PM10", "SO2", "NO2", "CO", "O3"])
        for month in range(1, 13):
            # The site's URLs use zero-padded months (01..12).
            url = f"http://www.tianqihoubao.com/aqi/{city}-{year}{month:02d}.html"
            resp = requests.get(url, timeout=30)
            # Fail loudly on HTTP errors instead of scraping an error page.
            resp.raise_for_status()
            soup = BeautifulSoup(resp.text, "lxml")
            table = soup.find("table")
            for row in table.find_all("tr")[1:]:  # skip the table header row
                cells = [td.text.strip() for td in row.find_all("td")]
                # Column index 3 is deliberately skipped, as in the original —
                # presumably a rank/unused column on the site; verify if layout changes.
                writer.writerow([cells[0], cells[1], cells[2], cells[4],
                                 cells[5], cells[6], cells[7], cells[8], cells[9]])

if __name__ == "__main__":
    # Only run the scrape when executed as a script, not when imported.
    fun("beijing", 2022)