import datetime
import functools
import inspect
import logging
import os

from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.executors.pool import ThreadPoolExecutor
from pytz import timezone

# 配置日志
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(),
        logging.FileHandler('data/scheduler.log')
    ]
)
logger = logging.getLogger('scheduler')

# Decorator that adds start/finish/error logging around scheduled jobs.
def log_job_execution(func):
    """Wrap *func* so each invocation logs its start, duration, and failures.

    Supports both coroutine functions and plain callables: an async job gets
    an async wrapper (so the scheduler can await it), a sync job gets an
    ordinary sync wrapper.  The original version awaited unconditionally and
    would break on a non-coroutine callable.  Exceptions are logged with a
    traceback and re-raised unchanged.
    """
    # Resolve the logger here so the decorator is self-contained; this is the
    # same instance as the module-level `logger` (name 'scheduler').
    log = logging.getLogger('scheduler')
    # functools.wraps below preserves __name__, so hoisting this is safe.
    job_name = func.__name__

    if inspect.iscoroutinefunction(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            start_time = datetime.datetime.now()
            # Lazy %-args avoid building the message when the level is filtered.
            log.info("Starting job: %s at %s", job_name, start_time)
            try:
                result = await func(*args, **kwargs)
                log.info("Job %s completed successfully in %s",
                         job_name, datetime.datetime.now() - start_time)
                return result
            except Exception as e:
                log.error("Job %s failed with error: %s", job_name, str(e), exc_info=True)
                raise
        return wrapper

    @functools.wraps(func)
    def sync_wrapper(*args, **kwargs):
        start_time = datetime.datetime.now()
        log.info("Starting job: %s at %s", job_name, start_time)
        try:
            result = func(*args, **kwargs)
            log.info("Job %s completed successfully in %s",
                     job_name, datetime.datetime.now() - start_time)
            return result
        except Exception as e:
            log.error("Job %s failed with error: %s", job_name, str(e), exc_info=True)
            raise
    return sync_wrapper

from app.worker.celery_worker import (
    run_baidu_crawler,
    run_bilibili_crawler,
    run_hackernews_crawler,
    run_tieba_crawler,
    run_cankaoxiaoxi_crawler,
    run_xueqiu_crawler,
    run_douyin_crawler,
    run_fastbull_express_crawler,
    run_fastbull_news_crawler,
    run_gelonghui_crawler,
    run_thepaper_crawler,
    run_v2ex_crawler,
    run_ithome_crawler,
    run_zhihu_crawler,
    run_judge_worker,
    run_toutiao_crawler,
    run_weibo_crawler,
    run_ttl_worker,
    run_kr36_crawler
)

# Apply the execution-logging decorator to every imported job callable by
# rebinding each module-level name to its wrapped version.
# NOTE(review): these wrapped callables are later handed to add_job() backed
# by a SQLAlchemyJobStore, which pickles job functions by reference; because
# functools.wraps copies __module__/__qualname__ from the originals, the
# wrappers advertise the undecorated functions' identity — confirm that
# add_job() can actually pickle them and that restored jobs stay decorated.
for _job_func_name in (
    'run_baidu_crawler',
    'run_bilibili_crawler',
    'run_hackernews_crawler',
    'run_tieba_crawler',
    'run_cankaoxiaoxi_crawler',
    'run_xueqiu_crawler',
    'run_douyin_crawler',
    'run_fastbull_express_crawler',
    'run_fastbull_news_crawler',
    'run_gelonghui_crawler',
    'run_thepaper_crawler',
    'run_v2ex_crawler',
    'run_ithome_crawler',
    'run_zhihu_crawler',
    'run_judge_worker',
    'run_toutiao_crawler',
    'run_weibo_crawler',
    'run_ttl_worker',
    'run_kr36_crawler',
):
    globals()[_job_func_name] = log_job_execution(globals()[_job_func_name])
del _job_func_name
# Job stores: persist scheduled jobs to a local SQLite file so the schedule
# survives process restarts.
# NOTE(review): assumes the 'data' directory exists before the first job is
# stored — confirm startup ordering on a fresh checkout.
jobstores = {
    'default': SQLAlchemyJobStore(url='sqlite:///data/jobs.sqlite')
}

# Executors: up to 20 jobs may run concurrently in worker threads.
# NOTE(review): the logging wrapper above awaits the job callables, which
# implies they are coroutine functions; a ThreadPoolExecutor cannot await a
# coroutine, and overriding 'default' here replaces the AsyncIO executor the
# AsyncIOScheduler would normally use for them — verify coroutine jobs are
# actually executed rather than silently returning unawaited coroutines.
executors = {
    'default': ThreadPoolExecutor(20)
}

# Defaults applied to every job unless overridden in add_job():
#   coalesce=False  -> run each missed occurrence instead of merging them
#   max_instances=3 -> allow up to 3 concurrent runs of the same job
job_defaults = {
    'coalesce': False,
    'max_instances': 3
}

# Single module-level scheduler instance, pinned to Asia/Shanghai so the cron
# fields in init_scheduler() are interpreted in Chinese local time.
scheduler = AsyncIOScheduler(
    jobstores=jobstores,
    executors=executors,
    job_defaults=job_defaults,
    timezone=timezone('Asia/Shanghai')
)

def init_scheduler():
    """Register all crawler/worker cron jobs on the module-level scheduler and start it.

    All existing jobs (including any restored from the persistent job store)
    are removed first, so the job table below is the single source of truth
    for the schedule.
    """
    logger.info("Initializing scheduler and adding jobs...")

    # Drop previously persisted jobs so stale schedules never linger.
    scheduler.remove_all_jobs()
    logger.info("Removed all existing jobs")

    # (job id, callable, cron fields).  Minute offsets are staggered so jobs
    # sharing an hour boundary do not all fire at the same instant.
    jobs = [
        # Baidu hot search - every 12 hours
        ('baidu_crawler', run_baidu_crawler, {'hour': '*/12', 'minute': '0'}),
        # Weibo hot search - every 6 hours
        ('weibo_crawler', run_weibo_crawler, {'hour': '*/6', 'minute': '1'}),
        # Toutiao hot list - every 6 hours
        ('toutiao_crawler', run_toutiao_crawler, {'hour': '*/6', 'minute': '2'}),
        # Bilibili hot search - every 12 hours
        ('bilibili_crawler', run_bilibili_crawler, {'hour': '*/12', 'minute': '5'}),
        # Hacker News - every 6 hours
        ('hackernews_crawler', run_hackernews_crawler, {'hour': '*/6', 'minute': '10'}),
        # Baidu Tieba hot topics - every 12 hours
        ('tieba_crawler', run_tieba_crawler, {'hour': '*/12', 'minute': '15'}),
        # Cankaoxiaoxi (Reference News) - every 6 hours
        ('cankaoxiaoxi_crawler', run_cankaoxiaoxi_crawler, {'hour': '*/6', 'minute': '20'}),
        # Xueqiu hot stocks - every hour
        ('xueqiu_crawler', run_xueqiu_crawler, {'hour': '*', 'minute': '25'}),
        # Douyin hot search - every 6 hours
        ('douyin_crawler', run_douyin_crawler, {'hour': '*/6', 'minute': '30'}),
        # FastBull express news - every hour
        ('fastbull_express_crawler', run_fastbull_express_crawler, {'hour': '*', 'minute': '35'}),
        # FastBull news - every 6 hours
        ('fastbull_news_crawler', run_fastbull_news_crawler, {'hour': '*/6', 'minute': '40'}),
        # Gelonghui - every 6 hours
        ('gelonghui_crawler', run_gelonghui_crawler, {'hour': '*/6', 'minute': '45'}),
        # The Paper (Pengpai) - every 6 hours
        ('thepaper_crawler', run_thepaper_crawler, {'hour': '*/6', 'minute': '50'}),
        # V2EX - every 6 hours
        ('v2ex_crawler', run_v2ex_crawler, {'hour': '*/6', 'minute': '55'}),
        # IT Home - every 4 hours
        ('ithome_crawler', run_ithome_crawler, {'hour': '*/4', 'minute': '0'}),
        # 36Kr - every hour ('*' is the normalized form of the original '*/1')
        ('kr36_crawler', run_kr36_crawler, {'hour': '*', 'minute': '0'}),
        # Zhihu hot list - every 4 hours
        ('zhihu_crawler', run_zhihu_crawler, {'hour': '*/4', 'minute': '5'}),
        # Content judging task - every 30 minutes
        ('judge_worker', run_judge_worker, {'minute': '*/30'}),
        # TTL cleanup task - every 30 minutes
        # (the original comment mislabeled this as the judging task)
        ('ttl_worker', run_ttl_worker, {'minute': '*/30'}),
    ]

    for job_id, func, cron_fields in jobs:
        scheduler.add_job(
            func,
            'cron',
            id=job_id,
            replace_existing=True,
            **cron_fields,
        )
        # Same message text as the original per-job log lines.
        logger.info("Added %s job", job_id)

    # Start dispatching on the running asyncio event loop.
    scheduler.start()
    logger.info("Scheduler started successfully")
