import redis
import time
from datetime import datetime
from apscheduler.schedulers.background import BackgroundScheduler
from config import REDIS_CONFIG, CRAWL_INTERVAL, NEWS_SOURCES
import importlib
import logging

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

class Scheduler:
    """Periodically runs news crawlers, one per entry in NEWS_SOURCES.

    Last-run timestamps are persisted in Redis under
    ``crawler_last_run:<source name>`` so the schedule survives restarts.
    """

    def __init__(self) -> None:
        self.redis_client = redis.Redis(**REDIS_CONFIG)
        self.crawlers: list = []
        self.load_crawlers()

    def load_crawlers(self) -> None:
        """Instantiate a crawler for every configured source.

        A source that fails to load is logged (with full traceback) and
        skipped, so one broken crawler does not block the others.
        """
        for source in NEWS_SOURCES:
            try:
                # Crawler modules live in the local ``crawlers`` package.
                module = importlib.import_module(f"crawlers.{source['crawler_module']}")
                crawler_class = getattr(module, source['crawler_class'])
                self.crawlers.append(crawler_class(source))
            except Exception as e:
                # logging.exception appends the traceback automatically,
                # replacing the old local `import traceback` + second log call.
                logging.exception("加载爬虫失败: %s", e)

    def should_run_crawler(self, crawler) -> bool:
        """Return True when ``crawler`` is due to run.

        Due means: never run before, or at least ``interval`` minutes
        (from the source config, default 30) have passed since the
        recorded last run.  Fails open (returns True) on any error so a
        Redis hiccup cannot permanently stall a crawler.
        """
        try:
            interval = crawler.source_config.get('interval', 30)  # minutes
            last_run_key = f"crawler_last_run:{crawler.source_config['name']}"
            last_run = self.redis_client.get(last_run_key)
            if last_run is None:
                return True  # never ran before

            # Stored value is a POSIX timestamp (bytes/str from Redis).
            last_run_time = datetime.fromtimestamp(float(last_run))
            minutes_passed = (datetime.now() - last_run_time).total_seconds() / 60
            return minutes_passed >= interval

        except Exception as e:
            logging.error("检查爬虫运行时间失败: %s", e)
            return True  # fail open: run when in doubt

    def update_crawler_last_run(self, crawler) -> None:
        """Record the current time as ``crawler``'s last run in Redis."""
        try:
            last_run_key = f"crawler_last_run:{crawler.source_config['name']}"
            self.redis_client.set(last_run_key, datetime.now().timestamp())
        except Exception as e:
            logging.error("更新爬虫运行时间失败: %s", e)

    def run(self) -> None:
        """Main loop: poll once a minute and run every crawler that is due.

        Each crawler is wrapped in its own try/except so a failure in one
        crawl no longer aborts the whole pass (previously a single
        exception skipped every remaining crawler for that cycle).
        """
        while True:
            for crawler in self.crawlers:
                try:
                    name = crawler.source_config['name']
                    if self.should_run_crawler(crawler):
                        logging.info("开始运行爬虫: %s", name)
                        crawler.crawl()
                        self.update_crawler_last_run(crawler)
                    else:
                        logging.info("跳过爬虫 %s: 未到运行时间", name)
                except Exception as e:
                    logging.error("调度器运行出错: %s", e)
            time.sleep(60)  # poll interval: one minute

if __name__ == '__main__':
    # Logging is already configured by the module-level basicConfig call
    # above; repeating basicConfig here was a no-op (it does nothing once
    # the root logger has handlers), so it has been removed.
    scheduler = Scheduler()
    try:
        scheduler.run()
    except KeyboardInterrupt:
        # Allow a clean Ctrl-C exit from the infinite polling loop.
        logging.info("Scheduler stopped by user")