"""
cron: 0 */12 * * *
new Env('房价数据监控');
"""
import os
import time
from datetime import datetime

import mysql.connector
import requests
from bs4 import BeautifulSoup

# --- Environment & database setup -------------------------------------------
# Connection settings are injected by the scheduler as environment variables.
host = os.environ.get('HOST')
user = os.environ.get('USER')
password = os.environ.get('PASSWORD')
database = os.environ.get('DATABASE')

print("正在获取环境变量...")
print(f"数据库地址：{host}")
print(f"数据库用户名：{user}")
# SECURITY: never echo the real password into cron logs — print a mask instead.
print(f"数据库密码：{'*' * len(password) if password else None}")
print(f"数据库名称：{database}")

# Open the MySQL connection shared by the rest of the script.
connection = mysql.connector.connect(
    host=host,
    user=user,
    password=password,
    database=database
)

cursor = connection.cursor()

# Make sure the destination table exists before any insert; a no-op on
# subsequent runs thanks to IF NOT EXISTS.
_CREATE_TABLE_SQL = """
CREATE TABLE IF NOT EXISTS ningm_house_price (
    id INT AUTO_INCREMENT PRIMARY KEY,
    city VARCHAR(50) NOT NULL,
    district VARCHAR(50) NOT NULL,
    avg_price DECIMAL(10,2),
    total_count INT,
    min_price DECIMAL(10,2),
    max_price DECIMAL(10,2),
    create_time DATETIME DEFAULT CURRENT_TIMESTAMP,
    INDEX idx_city (city),
    INDEX idx_district (district),
    INDEX idx_create_time (create_time)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
"""
cursor.execute(_CREATE_TABLE_SQL)

# Cities and districts to monitor. Key is the Lianjia subdomain code used to
# build the URL; 'name' is the display name stored in the DB; 'districts' are
# appended directly to the listing URL path.
# NOTE(review): Lianjia district URL segments are usually pinyin slugs (e.g.
# 'chaoyang'), not Chinese text — verify these values resolve on the live site.
cities = {
    'beijing': {
        'name': '北京',
        'districts': ['朝阳', '海淀', '东城', '西城', '丰台']
    },
    'shanghai': {
        'name': '上海',
        'districts': ['浦东', '徐汇', '静安', '黄浦', '长宁']
    }
}

def get_district_data(city_code, district, city_name):
    """Scrape Lianjia second-hand listing stats for one district and insert a row.

    Args:
        city_code: Lianjia subdomain for the city (e.g. 'beijing').
        district: district segment appended to the listing URL path.
        city_name: human-readable city name stored in the DB row.

    Returns:
        True on success, None on any failure (errors are logged, never raised,
        so one bad district does not abort the whole run).
    """
    try:
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
            'Referer': f'https://{city_code}.lianjia.com/ershoufang/{district}/'
        }

        url = f'https://{city_code}.lianjia.com/ershoufang/{district}/'
        # Timeout added so a stalled connection cannot hang the cron job forever.
        response = requests.get(url, headers=headers, timeout=15)

        # Previously a non-200 response fell through and returned None silently;
        # make the failure explicit and visible in the logs.
        if response.status_code != 200:
            print(f"获取{city_name}{district}区域数据失败: HTTP {response.status_code}")
            return None

        soup = BeautifulSoup(response.text, 'html.parser')

        # Total number of listings shown in the page header.
        total_count = int(soup.select_one('.total span').text.strip())

        # Per-listing unit prices. strip('单价元/平米') removes those characters
        # from both ends, leaving the digits (assumes no thousands separators in
        # the markup — TODO confirm against the live page).
        price_items = soup.select('.unitPrice')
        if not price_items:
            return None
        prices = [int(item.text.strip('单价元/平米')) for item in price_items]
        avg_price = sum(prices) / len(prices)
        min_price = min(prices)
        max_price = max(prices)

        print(f"正在保存{city_name}{district}区域数据...")
        # Parameterized insert; create_time is filled by the schema default.
        insert_query = """
            INSERT INTO ningm_house_price 
            (city, district, avg_price, total_count, min_price, max_price) 
            VALUES (%s, %s, %s, %s, %s, %s)
        """
        insert_values = (
            city_name,
            district,
            avg_price,
            total_count,
            min_price,
            max_price
        )
        cursor.execute(insert_query, insert_values)
        return True

    except Exception as e:
        # Best-effort per-district scrape: log and let the caller continue.
        print(f"获取{city_name}{district}区域数据失败: {str(e)}")
        return None

# --- Main flow ---------------------------------------------------------------
# try/finally guarantees the cursor and connection are released even if the
# DELETE or the scraping loop raises (previously they leaked on error).
try:
    # Purge rows older than 30 days so the table stays bounded.
    print("删除旧数据...")
    delete_query = "DELETE FROM ningm_house_price WHERE create_time < DATE_SUB(NOW(), INTERVAL 30 DAY)"
    cursor.execute(delete_query)

    # Scrape every configured district.
    print("开始获取房价数据...")
    for city_code, city_info in cities.items():
        for district in city_info['districts']:
            get_district_data(city_code, district, city_info['name'])
            # Throttle requests to avoid being rate-limited by the site.
            # (Requires `import time` at the top of the file.)
            time.sleep(2)

    # Single commit at the end: the purge and all inserts land together.
    connection.commit()
finally:
    cursor.close()
    connection.close()

print("数据保存完成！")