#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
简化版新闻爬取器
从公开的财经新闻源获取数据
"""

import requests
import json
import time
import random
from datetime import datetime, timedelta
from typing import List, Dict, Optional
from loguru import logger

# Import the dedicated parser modules
from .PerformanceForecastParser import PerformanceForecastParser
from .AssetRestructureParser import AssetRestructureParser

class SimpleNewsCollector:
    """Simplified news collector that scrapes public financial news sources."""

    # Maps a source type to (parser class, tag passed to get_news_list).
    # Unknown types fall back to the performance-forecast parser, matching
    # the original if/elif/else behavior.
    _PARSER_DISPATCH = {
        "performance_forecast": (PerformanceForecastParser, "业绩预告"),
        "asset_restructure": (AssetRestructureParser, "资产重组"),
    }
    _DEFAULT_DISPATCH = (PerformanceForecastParser, "业绩预告")

    def __init__(self) -> None:
        """Create a persistent HTTP session with browser-like headers."""
        self.session = requests.Session()
        # Desktop-browser headers so endpoints serve the regular payload.
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
            'Accept': 'application/json, text/plain, */*',
            'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
            'Accept-Encoding': 'gzip, deflate, br',
            'Connection': 'keep-alive',
        }
        self.session.headers.update(self.headers)

    def collect_financial_news(self, sources: Optional[List[str]] = None,
                               total_count: int = 30) -> List[Dict]:
        """
        Collect financial news (real scraping only).

        Args:
            sources: News source types (supports 'performance_forecast',
                'asset_restructure', 'general'). Defaults to all three.
            total_count: Maximum number of news items to return.

        Returns:
            List of news dicts, de-duplicated by title (first occurrence
            kept), truncated to at most ``total_count`` entries.
        """
        if sources is None:
            sources = ['performance_forecast', 'asset_restructure', 'general']
        if not sources:
            # Guard: an explicit empty list would otherwise cause a
            # ZeroDivisionError in the per-source quota below.
            return []

        all_news: List[Dict] = []
        per_source_count = max(1, total_count // len(sources))

        for source in sources:
            try:
                # Attempt to fetch real news data for this source.
                news = self._fetch_real_financial_news(per_source_count, source)
                if news:
                    all_news.extend(news)

                # Random pause between sources to avoid hammering endpoints.
                time.sleep(random.uniform(1, 2))

            except Exception as e:
                logger.error(f"从源 {source} 获取新闻失败: {str(e)}")
                continue

        # De-duplicate by title, preserving first-seen order.
        unique_news: List[Dict] = []
        seen_titles = set()
        for news in all_news:
            title = news.get('title', '')
            if title and title not in seen_titles:
                seen_titles.add(title)
                unique_news.append(news)

        logger.info(f"收集完成，共获取 {len(unique_news)} 条唯一新闻")
        return unique_news[:total_count]  # Enforce the overall cap

    def _fetch_real_financial_news(self, count: int = 20,
                                   source_type: str = "annual_report") -> List[Dict]:
        """
        Fetch real financial news data.

        Args:
            count: Maximum number of news items to return.
            source_type: News type ('performance_forecast',
                'asset_restructure'; anything else falls back to the
                performance-forecast parser).

        Returns:
            List of news dicts (empty list on failure or no data).
        """
        try:
            logger.info(f"获取{source_type}类型的财经新闻...")

            # Pick the parser/tag pair for this source type (with fallback).
            parser_cls, tag = self._PARSER_DISPATCH.get(
                source_type, self._DEFAULT_DISPATCH)
            news_list = parser_cls().get_news_list(tag=tag, page=1)
            return news_list[:count] if news_list else []

        except Exception as e:
            logger.error(f"获取财经新闻失败: {str(e)}")
            return []