import requests
import os
import mysql.connector
from datetime import datetime

# Read database connection settings from environment variables.
# All of these may be None if the variable is unset; mysql.connector
# will then fail at connect time with a clear error.
host = os.environ.get('HOST')
user = os.environ.get('USER')
password = os.environ.get('PASSWORD')
database = os.environ.get('DATABASE')

print("正在获取环境变量...")
print(f"数据库地址：{host}")
print(f"数据库用户名：{user}")
# SECURITY: never echo the real password into logs/stdout — print a mask
# instead (None is still shown as-is so a missing variable is obvious).
print(f"数据库密码：{'******' if password else password}")
print(f"数据库名称：{database}")

# Fetch the Juejin recommendation feed over HTTP.
print("发送HTTP请求获取掘金热榜...")
# Browser-like headers: the API rejects requests without a plausible
# User-Agent/Referer.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
    'Referer': 'https://juejin.cn'
}

url = 'https://api.juejin.cn/recommend_api/v1/article/recommend_all_feed'
# timeout prevents the script from hanging forever on a stalled
# connection (requests has no default timeout).
response = requests.get(url, headers=headers, timeout=10)

# Connect to MySQL using the credentials read from the environment.
connection = mysql.connector.connect(
    host=host,
    user=user,
    password=password,
    database=database
)

cursor = connection.cursor()

# Reserve ids 141-160 for Juejin entries in the shared news table.
id_list = list(range(141, 161))

# Remove any previous Juejin rows before re-inserting fresh data.
# Use driver-side parameter binding (one %s per id) rather than string
# formatting, consistent with the parameterized INSERT below.
print("删除已有数据...")
placeholders = ", ".join(["%s"] * len(id_list))
delete_query = "DELETE FROM ningm_news_list WHERE id IN ({})".format(placeholders)
cursor.execute(delete_query, tuple(id_list))

# Parse the feed response and persist the hot-list entries, then commit
# and release the database resources (even if an insert fails).
print("解析并保存掘金热榜数据...")
try:
    if response.status_code == 200:
        data = response.json()
        feed_items = data.get('data', [])

        # The feed can contain non-article entries (e.g. ads) that carry
        # no article_info/article_id; keep only real articles so we never
        # insert rows with empty titles or ".../post/None" URLs. Cap at
        # the number of reserved ids (20).
        articles = [
            entry.get('article_info', {})
            for entry in feed_items
            if entry.get('article_info', {}).get('article_id')
        ][:len(id_list)]

        # NOTE: "contant" is the actual column name in the table schema.
        insert_query = (
            "INSERT INTO ningm_news_list (id, title, url, create_time, contant, type) "
            "VALUES (%s, %s, %s, %s, %s, %s)"
        )
        for index, article in enumerate(articles):
            title = article.get('title', '')
            # Local name article_url avoids clobbering the module-level
            # request url.
            article_url = f"https://juejin.cn/post/{article.get('article_id')}"
            content = f"点赞数: {article.get('digg_count', 0)}"

            print(f"正在保存第{index + 1}条数据...")
            insert_values = (id_list[index], title, article_url, datetime.now(), content, 'juejin')
            cursor.execute(insert_query, insert_values)

    connection.commit()
finally:
    # Always close, even when parsing/inserting raises.
    cursor.close()
    connection.close()

print("数据保存完成！")