import requests
from bs4 import BeautifulSoup
import pymysql
from datetime import datetime

# Database connection settings, unpacked into pymysql.connect(**DB_CONFIG).
# NOTE(review): credentials are hard-coded in source — consider loading them
# from environment variables or a config file before deploying.
DB_CONFIG = {
    'host': 'localhost',
    'user': 'root',
    'password': '123456',
    'database': 'weather_db',
    'charset': 'utf8mb4'  # utf8mb4 so Chinese city/weather text round-trips
}


def create_database_tables():
    """Create the weather tables in MySQL if they do not already exist.

    Connects using ``DB_CONFIG`` and creates two tables:
    ``current_weather`` (latest observed conditions, one row per run) and
    ``forecast_weather`` (one row per forecast day per run).

    The cursor and connection are released in a ``finally`` block so they
    are closed even when a statement fails (the original leaked both on
    error).
    """
    conn = pymysql.connect(**DB_CONFIG)
    cursor = conn.cursor()
    try:
        # Current-conditions table; all weather fields are kept as raw
        # scraped strings, so VARCHAR columns rather than numeric types.
        cursor.execute("""
    CREATE TABLE IF NOT EXISTS current_weather (
        id INT AUTO_INCREMENT PRIMARY KEY,
        city VARCHAR(50) NOT NULL,
        temperature VARCHAR(10),
        weather_condition VARCHAR(50),
        wind_direction VARCHAR(20),
        wind_speed VARCHAR(20),
        humidity VARCHAR(20),
        air_quality VARCHAR(50),
        update_time VARCHAR(50),
        collection_time DATETIME DEFAULT CURRENT_TIMESTAMP
    )
    """)

        # Seven-day forecast table.
        cursor.execute("""
    CREATE TABLE IF NOT EXISTS forecast_weather (
        id INT AUTO_INCREMENT PRIMARY KEY,
        city VARCHAR(50) NOT NULL,
        date VARCHAR(20),
        day_of_week VARCHAR(10),
        weather_condition VARCHAR(50),
        high_temp VARCHAR(10),
        low_temp VARCHAR(10),
        wind_direction VARCHAR(20),
        wind_speed VARCHAR(20),
        collection_time DATETIME DEFAULT CURRENT_TIMESTAMP
    )
    """)

        conn.commit()
    finally:
        cursor.close()
        conn.close()


def scrape_beijing_weather():
    """Scrape current conditions and the 7-day forecast for Beijing.

    Returns:
        tuple: ``(current_weather, forecast_data)`` where the first item is
        a dict of current conditions and the second a list of per-day
        forecast dicts, or ``(None, None)`` if any step fails.

    NOTE(review): the CSS class names below are tied to tianqi.2345.com's
    markup at the time of writing — confirm against the live page. If the
    markup changes, ``find()`` returns None and the broad except below
    converts the resulting AttributeError into ``(None, None)``.
    """
    url = "https://tianqi.2345.com/beijing1d/54511.htm"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36 Edg/137.0.0.0'
    }

    try:
        # Explicit timeout so a stalled server cannot hang the scraper
        # forever (the original request had no timeout at all).
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')

        # Hoist the container lookup: the original re-queried
        # 'div.real-today' once per field (7 identical searches).
        today = soup.find('div', class_='real-today')
        winds = today.find_all('span', class_='wind')

        current_weather = {
            'city': '北京',
            'temperature': today.find('span', class_='temp').get_text(),
            'weather_condition': today.find('span', class_='weather').get_text(),
            'wind_direction': winds[0].get_text(),
            'wind_speed': winds[1].get_text(),
            # Strip the "湿度" (humidity) label prefix, keeping the value.
            'humidity': today.find('span', class_='humidity').get_text().replace('湿度', ''),
            'air_quality': today.find('span', class_='air-quality').get_text(),
            'update_time': today.find('span', class_='time').get_text()
        }

        # Forecast list items: [0] is today, so take the next seven days.
        forecast_data = []
        forecast_items = soup.find('div', class_='seven-day').find_all('li')[1:8]

        for item in forecast_items:
            date = item.find('span', class_='date').get_text()
            day_of_week = item.find('span', class_='week').get_text()
            weather = item.find('span', class_='weather').get_text()
            temp = item.find('span', class_='temp').get_text()
            # Expected shape "high/low"; anything else raises and falls
            # through to the except below.
            high_temp, low_temp = temp.split('/')
            # Split wind text once; original split the same string twice.
            wind_parts = item.find('span', class_='wind').get_text().split(' ')

            forecast_data.append({
                'city': '北京',
                'date': date,
                'day_of_week': day_of_week,
                'weather_condition': weather,
                'high_temp': high_temp,
                'low_temp': low_temp,
                'wind_direction': wind_parts[0],
                'wind_speed': wind_parts[1] if len(wind_parts) > 1 else ''
            })

        return current_weather, forecast_data

    except Exception as e:
        print(f"采集天气数据时出错: {e}")
        return None, None


def save_to_database(current_weather, forecast_data):
    """Insert the scraped weather data into MySQL.

    Args:
        current_weather: dict with the keys produced by
            ``scrape_beijing_weather`` (city, temperature, ...).
        forecast_data: list of per-day forecast dicts from the same source.

    On any error the transaction is rolled back and the error is printed;
    nothing is raised to the caller.
    """
    # Initialize to None so the except/finally paths below can tell
    # whether connect()/cursor() actually succeeded. The original raised
    # NameError (masking the real error) when connect() itself failed.
    conn = None
    cursor = None
    try:
        conn = pymysql.connect(**DB_CONFIG)
        cursor = conn.cursor()

        # Current conditions: single row per collection run.
        cursor.execute("""
        INSERT INTO current_weather (city, temperature, weather_condition, wind_direction, 
                                    wind_speed, humidity, air_quality, update_time)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """, (current_weather['city'], current_weather['temperature'],
              current_weather['weather_condition'], current_weather['wind_direction'],
              current_weather['wind_speed'], current_weather['humidity'],
              current_weather['air_quality'], current_weather['update_time']))

        # Forecast rows: batch them in one executemany instead of a
        # Python-level loop of single-row INSERTs.
        cursor.executemany("""
        INSERT INTO forecast_weather (city, date, day_of_week, weather_condition, 
                                     high_temp, low_temp, wind_direction, wind_speed)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """, [(forecast['city'], forecast['date'], forecast['day_of_week'],
               forecast['weather_condition'], forecast['high_temp'],
               forecast['low_temp'], forecast['wind_direction'],
               forecast['wind_speed']) for forecast in forecast_data])

        conn.commit()
        print("数据成功保存到数据库")

    except Exception as e:
        if conn is not None:
            conn.rollback()
        print(f"保存到数据库时出错: {e}")
    finally:
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()


def main():
    """Entry point: ensure tables exist, scrape Beijing weather, persist it,
    and echo the collected data to stdout."""
    create_database_tables()

    current_weather, forecast_data = scrape_beijing_weather()

    # Guard clause: nothing to persist or print if scraping failed.
    if not current_weather or not forecast_data:
        return

    save_to_database(current_weather, forecast_data)

    print("当前天气:")
    print(current_weather)
    print("\n未来7天天气预报:")
    for day in forecast_data:
        print(day)


if __name__ == "__main__":
    main()