"""
使用API和RSS方式获取热点数据的服务
支持多个平台的热点数据获取
"""
import asyncio
import hashlib
import json
import logging
import re
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional

import aiohttp
import feedparser
from bs4 import BeautifulSoup

logger = logging.getLogger(__name__)  # module-level logger named after this module (PEP 282 convention)


class APIHotService:
    """Aggregate trending topics from public APIs and RSS feeds.

    Sources: Baidu News RSS, GitHub search API, V2EX hot topics,
    Hacker News (Firebase API) and 36kr newsflashes.  Each item is
    normalized into a common dict shape (rank / title / url / source /
    platform / heat_value / fetch_time).  Use as an async context
    manager so the aiohttp session is opened and closed properly.
    """

    def __init__(self):
        # The session is created in __aenter__ so this object can be
        # constructed outside a running event loop.
        self.session: Optional[aiohttp.ClientSession] = None
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 Chrome/120.0.0.0'
        }

    async def __aenter__(self):
        # A total timeout keeps one stalled endpoint from hanging the whole run.
        self.session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=30))
        return self

    async def __aexit__(self, *args):
        if self.session:
            await self.session.close()

    @staticmethod
    def _now_iso() -> str:
        """Current UTC time as an ISO-8601 string with a trailing 'Z'.

        Replaces the deprecated datetime.utcnow(); output format matches
        the previous `utcnow().isoformat() + 'Z'` exactly.
        """
        return datetime.now(timezone.utc).isoformat().replace('+00:00', 'Z')

    def generate_hash(self, text: str) -> str:
        """Return the hex MD5 digest of *text* (dedup key, not security-sensitive)."""
        return hashlib.md5(text.encode()).hexdigest()

    async def fetch_baidu_hot(self) -> List[Dict[str, Any]]:
        """Fetch Baidu hot searches via the Baidu News RSS endpoint."""
        url = "http://news.baidu.com/ns?word=热搜&tn=newsrss&sr=0&cl=2&rn=20&ct=0"

        try:
            async with self.session.get(url, headers=self.headers) as response:
                if response.status == 200:
                    content = await response.text()
                    feed = feedparser.parse(content)

                    hot_list = []
                    for idx, entry in enumerate(feed.entries[:50], 1):
                        hot_list.append({
                            'rank': idx,
                            'title': entry.title,
                            'url': entry.link,
                            'description': entry.get('summary', ''),
                            'source': 'baidu',
                            'platform': 'baidu',
                            # RSS carries no heat metric; derive one from rank.
                            'heat_value': 50 - idx,
                            'fetch_time': self._now_iso()
                        })

                    logger.info(f"获取百度热搜 {len(hot_list)} 条")
                    return hot_list
        except Exception as e:
            logger.error(f"获取百度热搜失败: {e}")

        return []

    async def fetch_github_trending(self) -> List[Dict[str, Any]]:
        """Fetch trending GitHub repos (created in the last 7 days, by stars)."""
        url = "https://api.github.com/search/repositories"
        params = {
            'q': 'created:>' + (datetime.now() - timedelta(days=7)).strftime('%Y-%m-%d'),
            'sort': 'stars',
            'order': 'desc',
            'per_page': 30
        }

        try:
            async with self.session.get(url, params=params) as response:
                if response.status == 200:
                    data = await response.json()

                    hot_list = []
                    for idx, repo in enumerate(data.get('items', [])[:30], 1):
                        # GitHub sends "description": null for many repos;
                        # `or ''` avoids a TypeError on the slice below.
                        description = repo.get('description') or ''
                        hot_list.append({
                            'rank': idx,
                            'title': f"{repo['name']} - {description[:100]}",
                            'url': repo['html_url'],
                            'source': 'github',
                            'platform': 'tech',
                            'heat_value': repo['stargazers_count'],
                            'metadata': {
                                'stars': repo['stargazers_count'],
                                'forks': repo['forks_count'],
                                'language': repo.get('language', 'Unknown')
                            },
                            'fetch_time': self._now_iso()
                        })

                    logger.info(f"获取GitHub趋势 {len(hot_list)} 条")
                    return hot_list
        except Exception as e:
            logger.error(f"获取GitHub趋势失败: {e}")

        return []

    async def fetch_v2ex_hot(self) -> List[Dict[str, Any]]:
        """Fetch V2EX hot topics from its public JSON API."""
        url = "https://www.v2ex.com/api/topics/hot.json"

        try:
            async with self.session.get(url) as response:
                if response.status == 200:
                    data = await response.json()

                    hot_list = []
                    for idx, topic in enumerate(data[:30], 1):
                        hot_list.append({
                            'rank': idx,
                            'title': topic['title'],
                            'url': topic['url'],
                            'source': 'v2ex',
                            'platform': 'tech',
                            # Reply count doubles as the heat metric.
                            'heat_value': topic.get('replies', 0),
                            'metadata': {
                                'replies': topic.get('replies', 0),
                                'node': topic.get('node', {}).get('title', '')
                            },
                            'fetch_time': self._now_iso()
                        })

                    logger.info(f"获取V2EX热门 {len(hot_list)} 条")
                    return hot_list
        except Exception as e:
            logger.error(f"获取V2EX热门失败: {e}")

        return []

    async def _fetch_hn_story(self, rank: int, story_id: int) -> Optional[Dict[str, Any]]:
        """Fetch one Hacker News item and normalize it; None if unusable."""
        story_url = f"https://hacker-news.firebaseio.com/v0/item/{story_id}.json"
        async with self.session.get(story_url) as story_response:
            if story_response.status != 200:
                return None
            story = await story_response.json()
            if not story or not story.get('title'):
                return None
            return {
                'rank': rank,
                'title': story['title'],
                # "Ask HN"-style items have no external URL; link the thread.
                'url': story.get('url', f"https://news.ycombinator.com/item?id={story_id}"),
                'source': 'hackernews',
                'platform': 'tech',
                'heat_value': story.get('score', 0),
                'metadata': {
                    'points': story.get('score', 0),
                    'comments': story.get('descendants', 0)
                },
                'fetch_time': self._now_iso()
            }

    async def fetch_hackernews_hot(self) -> List[Dict[str, Any]]:
        """Fetch the top 20 Hacker News stories (detail requests run concurrently)."""
        url = "https://hacker-news.firebaseio.com/v0/topstories.json"

        try:
            async with self.session.get(url) as response:
                if response.status == 200:
                    story_ids = await response.json()

                    # The 20 per-story requests previously ran one at a time;
                    # gather overlaps them while preserving rank order.
                    tasks = [
                        self._fetch_hn_story(idx, story_id)
                        for idx, story_id in enumerate(story_ids[:20], 1)
                    ]
                    stories = await asyncio.gather(*tasks, return_exceptions=True)
                    hot_list = [s for s in stories if isinstance(s, dict)]

                    logger.info(f"获取Hacker News热门 {len(hot_list)} 条")
                    return hot_list
        except Exception as e:
            logger.error(f"获取Hacker News失败: {e}")

        return []

    async def fetch_36kr_news(self) -> List[Dict[str, Any]]:
        """Fetch 36kr newsflashes by extracting the embedded page-state JSON.

        The page inlines its data as `window.initialState = {...};` in a
        script tag; we locate that tag and parse the JSON payload.
        """
        url = "https://36kr.com/newsflashes"
        # Non-greedy so the match stops at the first '};'.
        state_pattern = re.compile(r'window\.initialState\s*=\s*({.*?});')

        try:
            async with self.session.get(url, headers=self.headers) as response:
                if response.status == 200:
                    html = await response.text()
                    soup = BeautifulSoup(html, 'html.parser')
                    hot_list = []

                    for script in soup.find_all('script'):
                        if not (script.string and 'window.initialState' in script.string):
                            continue
                        match = state_pattern.search(script.string)
                        if not match:
                            continue
                        try:
                            data = json.loads(match.group(1))
                        except json.JSONDecodeError:
                            # Truncated/changed payload — skip this script tag.
                            continue
                        news_items = data.get('newsflashList', {}).get('data', {}).get('items', [])
                        for idx, item in enumerate(news_items[:20], 1):
                            hot_list.append({
                                'rank': idx,
                                'title': item.get('title', ''),
                                'description': item.get('description', ''),
                                'url': f"https://36kr.com/newsflashes/{item.get('id', '')}",
                                'source': '36kr',
                                'platform': 'startup',
                                # No heat metric on the page; derive from rank.
                                'heat_value': 20 - idx,
                                'fetch_time': self._now_iso()
                            })

                    logger.info(f"获取36氪快讯 {len(hot_list)} 条")
                    return hot_list
        except Exception as e:
            logger.error(f"获取36氪快讯失败: {e}")

        return []

    async def fetch_all_hot_topics(self) -> Dict[str, List[Dict[str, Any]]]:
        """Fetch every platform concurrently; a failed platform yields [].

        Returns a mapping of platform key -> list of normalized hot items.
        """
        coros = {
            'baidu': self.fetch_baidu_hot(),
            'github': self.fetch_github_trending(),
            'v2ex': self.fetch_v2ex_hot(),
            'hackernews': self.fetch_hackernews_hot(),
            '36kr': self.fetch_36kr_news(),
        }

        # The previous loop awaited each coroutine serially; gather makes
        # the platform fetches actually overlap, as the comment promised.
        outcomes = await asyncio.gather(*coros.values(), return_exceptions=True)

        results: Dict[str, List[Dict[str, Any]]] = {}
        for platform, outcome in zip(coros, outcomes):
            if isinstance(outcome, Exception):
                logger.error(f"获取{platform}数据失败: {outcome}")
                results[platform] = []
            else:
                results[platform] = outcome

        total = sum(len(items) for items in results.values())
        logger.info(f"总共获取 {total} 条热点数据")

        return results


async def main():
    """Smoke test: fetch hot topics from every platform and print a summary.

    Returns True when at least one item was fetched across all platforms.
    """
    logging.basicConfig(level=logging.INFO)

    async with APIHotService() as svc:
        by_platform = await svc.fetch_all_hot_topics()

        print("\n=== 热点数据获取结果 ===")
        for name, entries in by_platform.items():
            print(f"\n{name.upper()}: {len(entries)} 条")
            # Preview up to three titles (loop is a no-op for empty lists).
            for entry in entries[:3]:
                print(f"  - {entry['title'][:50]}...")

        grand_total = sum(len(entries) for entries in by_platform.values())
        return grand_total > 0


if __name__ == "__main__":
    # asyncio is already imported at module top; the previous local
    # re-import was redundant.  SystemExit replaces the site-module
    # `exit()` helper, which is absent under `python -S`.
    succeeded = asyncio.run(main())
    raise SystemExit(0 if succeeded else 1)