# -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
import pandas as pd
import os
import time
import hashlib
from datetime import datetime
from requests_html import HTMLSession  # 替代requests处理动态内容

# Configuration (target URL, output file, request headers).
URL = "https://guba.eastmoney.com/list,zssh000001.html"
FILE_NAME = "上证指数_评论记录.xlsx"
HEADERS = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36",
    "Referer": "https://guba.eastmoney.com/",
    "Accept-Language": "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7",
    # BUGFIX: the original cookie literal spanned two physical lines without a
    # closing quote, which is a SyntaxError. Adjacent string literals inside
    # parentheses are concatenated at compile time into one newline-free value
    # (HTTP header values must not contain raw newlines).
    # NOTE(review): this is a hard-coded, account-specific session cookie copied
    # from a browser; it will expire. Consider loading it from an environment
    # variable or config file instead of committing it to source.
    "Cookie": (
        "qgqp_b_id=a6159529b88ca109377361e35222f0a7; mtp=1; ct=lp9_E0NHjZC6P1kf5hMXyWlxSqWzDZHs1dWsknLL4mntxgE1CA458o-ZNxxY-J_WkkcUmKPGqwwTlukRLQc_fuMqdqO4Z5X1C3H78ZT6ZNnqvTmFhmohNxv4FJKvEbgQZW0VzN_pP2RmLviIgCoNgau2pl-vcokXVuv7Vqi67WI; ut=FobyicMgeV52Ad4fCxim_Nggx1_k1XkwkyQDSd1HZTKpAbJNvHCPWg7t0F7HuGlfUcouozuvQTP6pccci0MivpuvvohaCBsWEmQo0PGzzQNQrFUhBQpomBdwoNyxVecF-3qxv8K1r-KnIeKt4KkliJhjq1DTNWfP3enpuQw4IAlEy2o768HzSFeowgUE9t9tk9Y0WeUBwwu1CPoRksVXf5oYKxinblj8Ekuuhf_-KatiHzvNvEx5IW_F0PVjvhX3_gu1ttNoia7EPA5klFL4P7wZDLPvFQy0; pi=9182057338887248%3Bk9182057338887248%3B%E8%82%A1%E5%8F%8Bf7892282X8%3BpMo8XT1G0Prq1Vo0Ak4zfrwcCB59B8YeclVgzbLknh0Mz3eEVrWbGtvoVMkL1T7qaAj6i7Hx7CA9VVdWF%2BWmUdr8du09CoRiFyyou1mnPYRZdRsEgjAmQ0IKAF0SuehZmYZP25Ch06zwIUQwiF5dT15%2BAoR7qc8fyy7WxQTXLpSZyqmvJu5Mgb2T8DF8ELSo8uoMGSLD%3BmkENppde8Xxn%2BqmzYvKS2D1RLQh5eQXTqY2OJ7pOHgZVvUOLtp1NgQT6exqQ1MZnTZkpMpQbt98zP5XplUIHZLu2AbJYeC6STRvQRUuI%2FkTaH5riXcY75WvBDHy8Dk5p1CvYZ9g4LNOYffWgxVPD5r5kAAWYAQ%3D%3D; uidal=9182057338887248%e8%82%a1%e5%8f%8bf7892282X8; sid=; vtpst=|; EMFUND1=null; EMFUND2=null; EMFUND3=null; EMFUND4=null; EMFUND5=null; EMFUND0=null; EMFUND7=02-24%2018%3A27%3A27@%23%24%u666F%u987A%u957F%u57CE%u4E2D%u8BC1A500ETF%u8054%u63A5A@%23%24022444; EMFUND8=02-25%2015%3A48%3A45@%23%24%u6C38%u8D62%u5148%u8FDB%u5236%u9020%u667A%u9009%u6DF7%u5408%u53D1%u8D77A@%23%24018124; EMFUND9=02-25%2016%3A28%3A58@%23%24%u6C38%u8D62%u5148%u8FDB%u5236%u9020%u667A%u9009%u6DF7%u5408%u53D1%u8D77C@%23%24018125; emshistory=%5B%22%E5%8D%8E%E6%B3%B0%E4%BF%9D%E5%85%B4%E5%AE%89%E6%82%A6%E5%80%BA%E5%88%B8C%22%2C%2224%E5%9B%BD%E7%89%B901%22%2C%2224%E7%89%B9%E5%9B%BD01%22%5D; EMFUND6=02-27 10:25:32@#$%u534E%u6CF0%u4FDD%u5174%u5B89%u60A6%u503A%u5238C@%23%24020741; fund_registerAd_1=1; websitepoptg_api_time=1740656749753; "
        "HAList=ty-101-GC00Y-COMEX%u9EC4%u91D1%2Cty-1-603777-%u6765%u4F0A%u4EFD%2Cty-0-300699-%u5149%u5A01%u590D%u6750%2Cty-1-000300-%u6CAA%u6DF1300%2Cty-1-019756-24%u7279%u56FD06%2Cty-1-019766-25%u56FD%u503A01%2Cty-1-000001-%u4E0A%u8BC1%u6307%u6570%2Cty-1-000905-%u4E2D%u8BC1500%2Cty-0-399808-%u4E2D%u8BC1%u65B0%u80FD; rskey=LheVfeURON3Z4Q3NPdmpqTWVpdjFETDVCZz09fd0LP; fullscreengg=1; fullscreengg2=1; st_si=02479296464675; st_asi=delete; st_pvi=15279966947538; st_sp=2025-02-24%2010%3A17%3A01; st_inirUrl=https%3A%2F%2Fpassport2.eastmoney.com%2F; st_sn=2;"
    )
}

def get_content_hash(text):
    """Return the MD5 hex digest of *text* (UTF-8), used as a dedup key."""
    digest = hashlib.md5(text.encode("utf-8"))
    return digest.hexdigest()

def fetch_dynamic_comments():
    """Fetch and parse comments from the guba page, rendering JavaScript first.

    Returns:
        list[dict]: one dict per comment with keys 采集时间 (scrape time),
        评论时间 (comment time), 评论内容 (text), 内容哈希 (md5 dedup key).
        Returns an empty list if the page cannot be fetched or rendered.
    """
    # Keep the try narrow: only the network/render phase should be able to
    # abort the whole fetch. Parsing problems are handled per item below.
    try:
        session = HTMLSession()
        resp = session.get(URL, headers=HEADERS, timeout=20)
        resp.html.render(sleep=3, timeout=20)  # execute the page's JavaScript
        soup = BeautifulSoup(resp.html.html, "lxml")
    except Exception as e:
        print(f"[ERROR] 抓取失败: {str(e)}")
        return []

    comments = []
    # Prefix match: the comment containers' class names carry dynamic suffixes.
    for item in soup.select("div[class^='comment_item']"):
        time_tag = item.find('span', class_='time')
        text_tag = item.find('div', class_='text')
        if not (time_tag and text_tag):
            continue

        raw_time = time_tag.get_text(strip=True)
        content = text_tag.get_text(strip=True)

        # BUGFIX: previously a single malformed timestamp raised ValueError,
        # which the blanket outer except caught — discarding every comment in
        # the batch. Skip only the unparseable item instead.
        # Expected site format: "2025-02-28 14:20" (Beijing local time).
        try:
            beijing_time = datetime.strptime(
                raw_time, "%Y-%m-%d %H:%M"
            ).strftime("%Y-%m-%d %H:%M:%S")
        except ValueError:
            print(f"[WARN] 无法解析评论时间: {raw_time!r}")
            continue

        comments.append({
            "采集时间": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            "评论时间": beijing_time,
            "评论内容": content,
            "内容哈希": get_content_hash(content),
        })
    return comments

def save_to_excel(data):
    """Append scraped comments to FILE_NAME, de-duplicating on 内容哈希.

    Existing rows are reloaded and merged with the new batch; when two rows
    share a content hash, the most recently scraped one is kept. Does nothing
    when *data* is empty.
    """
    if not data:
        return

    frames = [pd.DataFrame(data)]
    if os.path.exists(FILE_NAME):
        # Prepend the old sheet so keep="last" favors the fresh rows.
        frames.insert(0, pd.read_excel(FILE_NAME))

    merged = pd.concat(frames).drop_duplicates(subset=["内容哈希"], keep="last")
    merged.to_excel(FILE_NAME, index=False)

def smart_scheduler():
    """Run fetch/save cycles forever, pausing 25-35 minutes between runs.

    Never returns; each cycle is wrapped in a broad except so one failure
    cannot kill the loop.
    """
    import random

    while True:
        print(f"\n[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] 启动抓取周期")
        try:
            batch = fetch_dynamic_comments()
            if not batch:
                print("本次未发现新评论")
            else:
                save_to_excel(batch)
                print(f"成功保存{len(batch)}条新评论")
        except Exception as e:
            print(f"全局异常: {str(e)}")

        # Jitter the 30-minute base interval by ±5 minutes so requests
        # don't arrive on a fixed schedule.
        time.sleep(1800 + random.randint(-300, 300))

if __name__ == "__main__":
    # Entry point: starts the endless scrape/save loop (blocks forever).
    smart_scheduler()
