from bs4 import BeautifulSoup
import urllib.request
import sqlite3

class WeatherDB:
    """Thin wrapper around a local SQLite database holding weather rows."""

    def __init__(self):
        # Connection and cursor are created lazily in open_db().
        self.con = None
        self.cursor = None

    def open_db(self):
        """Open (or create) weathers.db and reset the weathers table.

        The table is wiped on every open so each run starts from a clean
        snapshot instead of mixing old and new forecasts.
        """
        self.con = sqlite3.connect("weathers.db")
        self.cursor = self.con.cursor()

        self.cursor.execute("""
            CREATE TABLE IF NOT EXISTS weathers (
                wCity VARCHAR(16),
                wDate VARCHAR(16),
                wWeather VARCHAR(64),
                wTemp VARCHAR(32),
                PRIMARY KEY (wCity, wDate)
            )
        """)
        self.cursor.execute("DELETE FROM weathers")

    def close_db(self):
        """Commit pending writes and close the connection.

        Safe to call more than once: the handle is cleared after closing.
        try/finally guarantees the connection is released even if the
        commit itself raises (the original leaked it in that case).
        """
        if self.con is None:
            return
        try:
            self.con.commit()
        finally:
            self.con.close()
            self.con = None
            self.cursor = None

    def insert_weather(self, city, date, weather, temp):
        """Insert one weather row; DB errors are reported, not raised.

        Duplicate (city, date) primary keys land here too, so one bad row
        never aborts the rest of the scrape (deliberate best-effort).
        """
        try:
            self.cursor.execute(
                "INSERT INTO weathers VALUES (?, ?, ?, ?)",
                (city, date, weather, temp)
            )
        except sqlite3.Error as e:
            # Narrowed from bare Exception: only database failures are expected here.
            print(f"插入数据失败: {e}")

    def display_data(self):
        """Print every stored row as a fixed-width table with a header."""
        self.cursor.execute("SELECT * FROM weathers")
        rows = self.cursor.fetchall()

        header = "%-16s%-16s%-32s%-16s"
        print(header % ("city", "date", "weather", "temp"))
        for row in rows:
            print(header % row)

class WeatherForecast:
    """Scrape 7-day forecasts from weather.com.cn and store them via WeatherDB."""

    def __init__(self):
        # Browser-like User-Agent; bare urllib requests may be rejected by the site.
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
        }
        # City name -> weather.com.cn numeric station code.
        self.city_codes = {
            "北京": "101010100", 
            "上海": "101020100", 
            "泉州": "101230501",
            "厦门": "101230201"
        }
        self.db = WeatherDB()

    def get_weather_data(self, city):
        """Fetch and parse the forecast page for a single city.

        Unknown cities are reported and skipped. Network or parse failures
        are caught and reported so one bad city cannot abort the whole run.
        """
        if city not in self.city_codes:
            print(f"未找到城市编码: {city}")
            return

        url = f"http://www.weather.com.cn/weather/{self.city_codes[city]}.shtml"
        
        try:
            req = urllib.request.Request(url, headers=self.headers)
            # Context manager closes the HTTP response (the original leaked it).
            with urllib.request.urlopen(req) as response:
                html = response.read().decode('utf-8', 'ignore')

            soup = BeautifulSoup(html, 'lxml')
            # Each <li> under ul.t.clearfix is one day's forecast entry.
            for item in soup.select("ul.t.clearfix li"):
                self._parse_weather_item(city, item)
                
        except Exception as e:
            print(f"获取{city}天气数据失败: {e}")

    def _parse_weather_item(self, city, item):
        """Extract date / weather / temperature from one day's <li> and store it."""
        try:
            date = item.select_one('h1').text
            weather = item.select_one('p.wea').text
            temp_tag = item.select_one('p.tem')

            # The daytime high lives in <span>; night-only entries omit it,
            # in which case only the low from <i> is recorded.
            high_temp = temp_tag.select_one('span')
            low_temp = temp_tag.select_one('i')
            temp = f"{high_temp.text}/{low_temp.text}" if high_temp else low_temp.text

            print(city, date, weather, temp)
            self.db.insert_weather(city, date, weather, temp)

        except Exception as e:
            # Best-effort per item: a malformed entry is reported and skipped.
            print(f"解析天气项失败: {e}")

    def process_cities(self, cities):
        """Fetch, display, and persist weather data for every city in *cities*.

        try/finally guarantees the database is committed and closed even if
        fetching or displaying raises (the original could leak the connection).
        """
        self.db.open_db()
        try:
            for city in cities:
                self.get_weather_data(city)
            self.db.display_data()
        finally:
            self.db.close_db()

if __name__ == "__main__":
    # Scrape and display forecasts for the four configured cities.
    target_cities = ["北京", "上海", "泉州", "厦门"]
    app = WeatherForecast()
    app.process_cities(target_cities)
    print("数据获取完成")