"""
财经新闻相关的Celery任务
"""
import logging
import datetime
from celery import group
from celery.utils.log import get_task_logger
from sqlalchemy import func

from modules.tasks.celery_app import app
from modules.models.base import get_db
from modules.models.stock import SecurityInfo
from modules.data_collector.news_fundamental_collector import EconomicNews
from modules.data_collector import collector

# Configure the task-level logger
logger = get_task_logger(__name__)


@app.task(bind=True, queue='stock_data', max_retries=3, retry_backoff=60)
def collect_financial_news(self, source="sina", limit=50):
    """
    Collect financial news from the given provider.

    Args:
        source: News source identifier, e.g. "sina", "eastmoney", "10jqka".
        limit: Maximum number of news items to collect.

    Returns:
        dict with keys "status", "source", and "count" (items collected).

    Raises:
        celery.exceptions.Retry: the task is re-queued (up to max_retries,
            with backoff) on any collection failure.
    """
    # Lazy %-style args avoid formatting when the log level is disabled.
    logger.info("开始采集%s财经新闻，数量限制: %s", source, limit)
    try:
        count = collector.collect_market_data(
            "financial_news",
            source=source,
            limit=limit
        )
    except Exception as e:
        # logger.exception records the full traceback, which the previous
        # plain error log discarded.
        logger.exception("采集%s财经新闻失败: %s", source, e)
        raise self.retry(exc=e)
    logger.info("成功采集%s条%s财经新闻", count, source)
    return {
        "status": "success",
        "source": source,
        "count": count
    }


@app.task(bind=True, queue='stock_data', max_retries=3, retry_backoff=60)
def collect_stock_notice(self, stock_code, start_date=None, end_date=None):
    """
    Collect corporate notices for one stock.

    Args:
        stock_code: Stock code to collect notices for.
        start_date: Start date, format YYYYMMDD (None = collector default).
        end_date: End date, format YYYYMMDD (None = collector default).

    Returns:
        dict with keys "status", "stock_code", and "count" (items collected).

    Raises:
        celery.exceptions.Retry: the task is re-queued (up to max_retries,
            with backoff) on any collection failure.
    """
    logger.info("开始采集股票 %s 的公告", stock_code)
    try:
        count = collector.collect_market_data(
            "stock_notice",
            code=stock_code,
            start_date=start_date,
            end_date=end_date
        )
    except Exception as e:
        # logger.exception keeps the traceback; the old f-string error
        # log threw it away.
        logger.exception("采集股票 %s 公告失败: %s", stock_code, e)
        raise self.retry(exc=e)
    logger.info("成功采集%s条股票 %s 公告", count, stock_code)
    return {
        "status": "success",
        "stock_code": stock_code,
        "count": count
    }


@app.task(bind=True, queue='stock_data')
def schedule_news_collection(self, sources=None, limit_per_source=50):
    """
    Fan out one parallel collect_financial_news task per news source.

    Args:
        sources: List of news sources; defaults to ["sina", "eastmoney"].
        limit_per_source: Number of news items to collect per source.

    Returns:
        dict with keys "status", "message", and "sources" (the list used).

    Raises:
        Exception: re-raised after logging if scheduling fails.
    """
    logger.info("开始调度新闻采集任务")
    try:
        if sources is None:
            sources = ["sina", "eastmoney"]

        # Build one signature per source and dispatch them as a single
        # parallel group (fire-and-forget; results are not collected here).
        group(
            [collect_financial_news.s(source, limit_per_source)
             for source in sources]
        ).apply_async()

        logger.info("已调度 %s 个新闻源的采集任务", len(sources))
        return {
            "status": "success",
            "message": f"已调度 {len(sources)} 个新闻源的采集任务",
            "sources": sources
        }
    except Exception as e:
        # logger.exception preserves the traceback before re-raising.
        logger.exception("调度新闻采集任务失败: %s", e)
        raise


# Manual smoke test (run via __main__ below)
def test_news_tasks():
    """Smoke-test the news tasks: submit them and wait for their results.

    Requires a running Celery worker and a reachable database.
    """
    # Submit a financial-news collection task and block on its result.
    news_result = collect_financial_news.delay("sina", 10)
    print(f"采集新浪财经新闻任务已提交，任务ID: {news_result.id}")

    result = news_result.get(timeout=120)
    print(f"采集新浪财经新闻结果: {result}")

    # Pick any one stock to exercise the notice-collection task.
    db = next(get_db())
    try:
        stock = db.query(SecurityInfo).filter(
            SecurityInfo.type == 'stock'
        ).first()
    finally:
        # Bug fix: previously the session leaked if the query raised,
        # because db.close() was not in a finally block.
        db.close()

    if stock:
        notice_result = collect_stock_notice.delay(stock.code)
        print(f"采集股票 {stock.code} 公告任务已提交，任务ID: {notice_result.id}")

        result = notice_result.get(timeout=120)
        print(f"采集股票公告结果: {result}")
    else:
        print("没有找到股票数据，无法测试采集任务")


if __name__ == "__main__":
    # 配置日志
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )
    
    # 执行测试
    test_news_tasks()