# celery_worker.py
from celery import Celery
from config import Config, celery_config
import requests
import json
from datetime import datetime
import logging
import time
from typing import Optional, Dict
# AI summary processors (imported here for the processing tasks below)
from ai_processing.summary_processor import (
    BaiduNewsSummarizer,
    generate_board_summary
)

# Configure module-level logging (note: this basicConfig call claims the
# root logger at import time)
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Create the Celery application and load its settings from celery_config
celery = Celery(__name__)
celery.config_from_object(celery_config)

# Lazily-created Flask app singleton; the deferred import in
# get_flask_app() below avoids a circular import with the app module
_flask_app = None

def get_flask_app():
    """
    Return the lazily-initialised Flask application singleton.

    ``create_app`` is imported inside this function on purpose: importing
    it at module scope would create a circular import between the worker
    module and the app module.

    Returns:
        The cached Flask application instance.
    """
    global _flask_app
    if _flask_app is not None:
        return _flask_app
    # Deferred import to break the circular dependency
    from app import create_app
    _flask_app = create_app()
    return _flask_app


def calculate_initial_score(news):
    """
    Compute the initial relevance score for a processed news item.

    The score is the sum of two components:
      * entity weight -- one weight per involved brand, looked up in
        ``Config.ENTITY_WEIGHTS`` (falling back to the ``'default'``
        weight, or 1 when no default is configured), and
      * freshness -- ``TIME_BASE_SCORE / (hours_old + 1)``, which decays
        as the article ages.

    Args:
        news (ProcessedNews): processed news object; reads
            ``involved_brands`` (iterable of brand names, may be falsy)
            and ``publish_date`` (naive UTC datetime or None —
            presumably UTC to match ``datetime.utcnow()``; verify).

    Returns:
        float: the combined initial score (0 when there are no brands
        and no publish date).
    """
    score = 0

    # Entity-weight component: one configured weight per involved brand.
    # The default-weight lookup is loop-invariant, so hoist it.
    if news.involved_brands:
        default_weight = Config.ENTITY_WEIGHTS.get('default', 1)
        for brand in news.involved_brands:
            score += Config.ENTITY_WEIGHTS.get(brand, default_weight)

    # Freshness component: newer articles score higher.
    if news.publish_date:
        # Clamp age to >= 0: a publish_date slightly in the future
        # (clock skew, timezone mix-ups) would otherwise inflate the
        # score unboundedly or even make it negative.
        hours_old = max(
            0.0,
            (datetime.utcnow() - news.publish_date).total_seconds() / 3600,
        )
        time_base_score = getattr(Config, 'TIME_BASE_SCORE', 24)
        score += time_base_score / (hours_old + 1)

    return score

@celery.task(bind=True)
def process_news_task(self, news_id):
    """
    Celery task: run the full AI pipeline for one raw news row.

    Pipeline (all inside a Flask app context):
      1. detailed summary via Baidu AI (best-effort; falls back to a
         placeholder string on failure),
      2. named-entity extraction and brand mapping over title + content,
      3. topic classification,
      4. short board summary,
      5. persist a ProcessedNews row, mark the RawNews row as processed,
         and compute its initial score — all committed in one transaction.

    Args:
        news_id: primary key of the RawNews row to process.

    Raises:
        celery.exceptions.Retry: any failure triggers a retry
            (60 s countdown, at most 3 attempts).
    """
    try:
        flask_app = get_flask_app()

        with flask_app.app_context():
            # Deferred model/processor imports to avoid circular imports
            from models.models import RawNews, ProcessedNews
            from ai_processing.ner_processor import extract_entities, map_entities_to_brands
            from ai_processing.topic_classifier import classify_topic

            raw_news = RawNews.query.get(news_id)
            if not raw_news or raw_news.processed:
                logger.info(f"新闻 {news_id} 不存在或已处理")
                return

            # 1. Generate the detailed summary (best-effort: any Baidu AI
            #    failure is logged and the placeholder below is used)
            summary = None
            if Config.BAIDU_SUMMARY_ENABLED and Config.BAIDU_AI_API_KEY:
                try:
                    summarizer = BaiduNewsSummarizer()
                    summary = summarizer.summarize(raw_news.content)
                except Exception as e:
                    logger.warning(f"百度AI摘要生成失败: {e}")

            if not summary:
                summary = "摘要生成失败"

            logger.info(f"为新闻 {news_id} 生成摘要完成")

            # 2. Entity recognition and brand mapping over title + content
            full_text = raw_news.title + " " + raw_news.content
            entities = extract_entities(full_text)
            brands, brand_type = map_entities_to_brands(entities)
            logger.info(f"为新闻 {news_id} 完成实体识别")

            # 3. Topic classification
            topics = classify_topic(full_text)
            logger.info(f"为新闻 {news_id} 完成主题分类")

            # 4. Generate the short board summary
            board_summary = generate_board_summary(raw_news.title, raw_news.content)
            logger.info(f"为新闻 {news_id} 生成看板摘要完成")

            # 5. Create the processed-news record (normalising None results
            #    from the AI steps into safe defaults)
            processed_news = ProcessedNews(
                raw_news_id=raw_news.id,
                title=raw_news.title,
                publish_date=raw_news.publish_date,
                involved_entities=entities if entities else [],  # ensure a list
                involved_brands=brands if brands else [],  # ensure a list
                brand_type=brand_type if brand_type else 'unknown',  # ensure a string
                topics=topics if topics else [],  # ensure a list
                event_description=summary,  # always a string (placeholder on failure)
                board_summary=board_summary if board_summary else raw_news.title[:50],  # ensure non-empty
                category='其他'  # default category
            )

            # 6. Compute the initial score
            processed_news.news_score = calculate_initial_score(processed_news)

            # Get the shared database instance (deferred import, see above)
            from app import db
            db.session.add(processed_news)
            raw_news.processed = True
            db.session.commit()

            logger.info(f"新闻 {news_id} 处理完成")

    except Exception as exc:
        logger.error(f"处理新闻 {news_id} 时发生错误: {exc}")
        # Retry on any failure: 60 s delay, up to 3 attempts
        raise self.retry(exc=exc, countdown=60, max_retries=3)

@celery.task
def update_news_scores():
    """
    Periodic task: refresh the freshness component of every news score.

    Iterates all ProcessedNews rows inside a Flask app context, nudges
    each row's ``news_score`` by a freshness delta, and commits once at
    the end. Per-row failures are logged and skipped; a failure outside
    the loop is logged and re-raised.
    """
    try:
        flask_app = get_flask_app()

        with flask_app.app_context():
            from models.models import ProcessedNews
            from app import db

            all_news = ProcessedNews.query.all()
            updated_count = 0

            for news in all_news:
                try:
                    # Recompute the freshness score for the current age
                    if news.publish_date:
                        hours_old = (datetime.utcnow() - news.publish_date).total_seconds() / 3600
                        time_base_score = getattr(Config, 'TIME_BASE_SCORE', 24)
                        time_score = time_base_score / (hours_old + 1)

                        # Keep all other score components; adjust only the
                        # freshness part by the delta between the current
                        # freshness score and base/(hours_old + 2).
                        # NOTE(review): (hours_old + 2) appears to stand in
                        # for the freshness score applied one hour earlier;
                        # that is only exact if this task runs hourly —
                        # confirm the intended schedule and formula.
                        news.news_score = news.news_score + (time_score - time_base_score / (hours_old + 2))

                    # Counts every row reached here, including rows with no
                    # publish_date (whose score is left unchanged)
                    updated_count += 1

                except Exception as e:
                    logger.error(f"更新新闻 {news.id} 分数时出错: {e}")

            db.session.commit()
            logger.info(f"成功更新 {updated_count} 条新闻的分数")

    except Exception as e:
        logger.error(f"更新新闻分数任务失败: {e}")
        raise


import argparse
import signal
import sys
import logging

def main():
    """
    CLI entry point that starts a Celery worker for the news-AI pipeline.

    Command-line options:
        --log-level    logging verbosity (DEBUG/INFO/WARNING/ERROR)
        --concurrency  worker concurrency (processes/threads)
        --hostname     Celery worker hostname pattern

    Returns:
        int: 0 on clean shutdown, 1 if the worker failed to start.
    """
    parser = argparse.ArgumentParser(description='新闻AI处理系统 - Celery Worker')
    parser.add_argument('--log-level', default='INFO',
                        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
                        help='日志级别')
    parser.add_argument('--concurrency', type=int, default=1,
                        help='Worker并发数')
    parser.add_argument('--hostname', default='news-ai-worker@%h',
                        help='Worker主机名')

    args = parser.parse_args()

    # logging.basicConfig() already ran at import time, so calling it again
    # here would be a silent no-op and --log-level would have no effect.
    # Set the root logger level explicitly instead.
    logging.getLogger().setLevel(getattr(logging, args.log_level))

    # Graceful shutdown on SIGINT/SIGTERM
    def signal_handler(sig, frame):
        logger.info(f'接收到信号 {sig}，正在优雅关闭...')
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    try:
        logger.info("正在启动Celery Worker...")
        logger.info(f"日志级别: {args.log_level}")
        logger.info(f"并发数: {args.concurrency}")

        # Hand control to Celery's own worker entry point (blocks until
        # the worker stops)
        celery.worker_main([
            'worker',
            '--loglevel=%s' % args.log_level.lower(),
            '--concurrency=%d' % args.concurrency,
            '--hostname=%s' % args.hostname
        ])

    except KeyboardInterrupt:
        logger.info("用户中断程序")
    except Exception as e:
        logger.error(f"Celery Worker启动失败: {e}")
        return 1

    return 0

# Script entry point: run the worker CLI and propagate its exit status
if __name__ == '__main__':
    sys.exit(main())

