import os
import sys
import django
import requests
from bs4 import BeautifulSoup
import re

# Add the project root to sys.path so the Django project package is
# importable when this script is executed directly (the spider lives
# two directory levels below the project root).
current_dir = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.dirname(os.path.dirname(current_dir))
sys.path.append(project_root)

# Configure and initialize Django so the ORM can be used standalone.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'weather_comparison.settings')
django.setup()

# NOTE: must come after django.setup() — importing models earlier raises
# AppRegistryNotReady.
from weather_app.models import WeatherData

class WeatherSpider:
    """Scrape historical daily weather rows from tianqihoubao.com and
    persist each row through the Django ``WeatherData`` model."""

    # Browser-like UA: the site tends to reject default python user agents.
    HEADERS = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
    }

    def parse_weather(self, url):
        """Fetch *url*, parse the monthly history table and save every row.

        Each data row is expected to contain four cells:
        date, weather description, temperature range, wind.
        All errors are reported and swallowed (best-effort scraping).
        """
        try:
            # timeout= prevents the request from hanging indefinitely on a
            # stalled connection; raise_for_status() surfaces HTTP errors
            # instead of parsing an error page as if it were data.
            response = requests.get(url, headers=self.HEADERS, timeout=10)
            response.raise_for_status()
            soup = BeautifulSoup(response.text, 'html.parser')

            table = soup.find('table', class_='b')
            if not table:
                print("未找到天气数据表格")
                return

            for tr in table.find_all('tr')[1:]:  # skip the header row
                tds = tr.find_all('td')
                if len(tds) != 4:
                    # Malformed / spanning rows are silently skipped,
                    # matching the table's expected 4-column layout.
                    continue
                # Collapse all whitespace inside the date cell
                # (site pads it with newlines and spaces).
                date = re.sub(r'\s+', '', tds[0].text.strip())
                weather = tds[1].text.strip()
                temperature = tds[2].text.strip()
                wind = tds[3].text.strip()

                # Persist one record per day.
                WeatherData.objects.create(
                    date=date,
                    weather=weather,
                    temperature=temperature,
                    wind=wind
                )
                print(f"成功保存 {date} 的天气数据")

        except Exception as e:
            print(f"处理数据时出错: {e}")

    def run(self, url='http://tianqihoubao.com/lishi/chongqing/month/202406.html'):
        """Crawl one monthly history page.

        The previously hard-coded URL (Chongqing, 2024-06) is now the
        default, so existing no-argument callers behave identically while
        other months/cities can be crawled by passing a different URL.
        """
        try:
            self.parse_weather(url)
            print("数据爬取完成并保存到数据库")
        except Exception as e:
            print(f"爬取过程中出现错误: {e}")

# Script entry point: build the spider and start the crawl.
if __name__ == '__main__':
    WeatherSpider().run()
