"""
cron: */30 * * * *
new Env('知乎热榜采集');
"""
import requests
import os
import mysql.connector
from datetime import datetime

# Read database connection settings from environment variables
# (supplied by the qinglong panel's Env configuration).
host = os.environ.get('HOST')
user = os.environ.get('USER')
password = os.environ.get('PASSWORD')
database = os.environ.get('DATABASE')

print("正在获取环境变量...")
print(f"数据库地址：{host}")
print(f"数据库用户名：{user}")
# Security: never echo the credential itself into the job log —
# only report whether it is configured.
print(f"数据库密码：{'******' if password else None}")
print(f"数据库名称：{database}")

# Fetch the Zhihu hot list from the public feed API.
print("发送HTTP请求获取知乎热榜...")
# Browser-like headers — the endpoint rejects bare/default clients.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
    'Referer': 'https://www.zhihu.com/hot'
}

url = 'https://www.zhihu.com/api/v3/feed/topstory/hot-lists/total'

# An explicit timeout keeps a stalled connection from hanging the
# cron job forever (requests has no default timeout).
response = requests.get(url, headers=headers, timeout=30)

# Connect to MySQL with the credentials read from the environment.
print("连接到MySQL数据库...")
connection = mysql.connector.connect(
    host=host,
    user=user,
    password=password,
    database=database
)

cursor = connection.cursor()

# IDs 101-120 are the slots reserved for Zhihu hot-list rows
# in ningm_news_list (20 rows, one per hot-list entry).
id_list = list(range(101, 121))

# Only clear the old rows when the API call actually succeeded;
# otherwise a failed fetch would wipe the table and insert nothing,
# leaving the site with no data at all.
if response.status_code == 200:
    print("删除已有数据...")
    # Parameterized placeholders instead of string-formatted SQL.
    placeholders = ", ".join(["%s"] * len(id_list))
    delete_query = f"DELETE FROM ningm_news_list WHERE id IN ({placeholders})"
    cursor.execute(delete_query, id_list)

# Parse the API response and persist the top 20 entries.
print("解析并保存知乎热榜数据...")
try:
    if response.status_code == 200:
        data = response.json()
        hot_list = data.get('data', [])

        # Cap at 20 items — only IDs 101-120 are reserved for this feed.
        for index, item in enumerate(hot_list[:20]):
            target = item.get('target', {})
            title = target.get('title', '')
            url = f"https://www.zhihu.com/question/{target.get('id')}"
            hot_value = item.get('detail_text', '暂无热度')

            print(f"正在保存第{index + 1}条数据...")
            # NOTE: "contant" matches the existing table schema — do not "fix" it here.
            insert_query = "INSERT INTO ningm_news_list (id, title, url, create_time, contant, type) VALUES (%s, %s, %s, %s, %s, %s)"
            insert_values = (id_list[index], title, url, datetime.now(), hot_value, 'zhihu')
            cursor.execute(insert_query, insert_values)
    else:
        # Surface the failure instead of silently skipping the save step.
        print(f"请求失败，状态码：{response.status_code}")

    # Single commit after all inserts: the update is all-or-nothing.
    connection.commit()
finally:
    # Always release DB resources, even when an insert raises mid-loop.
    cursor.close()
    connection.close()

print("数据保存完成！")