import os
import time
import traceback
from datetime import datetime, timedelta

def auto_run_daily():
    """Run today's crawl once, retrying up to 5 times on failure.

    Reads Tencent credentials from the environment (falling back to the
    placeholder 'REDACTED'), drives ``ProductHunterCrawler`` for today's
    date, and logs every attempt. The logger is always closed via
    ``finally``, even if an unexpected error escapes the retry loop.
    """
    logger = Logger()
    secret_id = os.getenv('TENCENT_SECRET_ID', 'REDACTED')
    secret_key = os.getenv('TENCENT_SECRET_KEY', 'REDACTED')
    crawler = ProductHunterCrawler(secret_id, secret_key, logger)
    max_retries = 5
    try:
        for attempt in range(max_retries):
            try:
                today = datetime.now().strftime("%Y-%m-%d")
                logger.log(f"=== 自动爬虫启动，尝试第{attempt+1}次 ===")
                filepath = crawler.run(today)
                if filepath:
                    logger.log("=== 自动爬虫完成 ===")
                    logger.log(f"数据已保存到: {filepath}")
                    break
                # An empty/None result is an expected failure mode: raise so
                # it flows through the same retry path as real exceptions.
                raise RuntimeError("未获取到数据文件")
            except Exception as e:
                logger.log(f"[自动爬虫异常] {str(e)}")
                logger.log(traceback.format_exc())
                if attempt < max_retries - 1:
                    logger.log("5秒后重试...")
                    time.sleep(5)
                else:
                    # Previously the function exited silently after the last
                    # retry; record the terminal failure explicitly.
                    logger.log(f"[自动爬虫失败] 已重试{max_retries}次，放弃本次任务")
    finally:
        # Guarantee the log resource is released even on unexpected errors.
        logger.close()

if __name__ == "__main__":
    while True:
        now = datetime.now()
        target = now.replace(hour=18, minute=0, second=0, microsecond=0)
        if now >= target:
            # 如果已经过了18点，等待到明天18点
            target = target + timedelta(days=1)
        wait_seconds = (target - now).total_seconds()
        print(f"距离下次自动爬取还有 {int(wait_seconds)} 秒")
        time.sleep(wait_seconds)
        auto_run_daily()