"""
数据收集器

提供多源数据收集、数据质量检查、数据缓存等功能。
"""

import asyncio
import logging
from datetime import datetime, timedelta
from decimal import Decimal
from enum import Enum
from typing import Dict, List, Optional, Callable, Any, Set, Union
from dataclasses import dataclass, field
import json
import time
import urllib.request
import urllib.parse
import urllib.error


class DataSource(Enum):
    """Supported market data source types.

    Only a subset has a handler registered in ``DataCollector.source_handlers``;
    the rest (WIND, EASTMONEY, YAHOO) are declared for future use.
    """
    TUSHARE = "tushare"         # Tushare data feed
    AKSHARE = "akshare"         # AKShare data feed
    WIND = "wind"               # Wind terminal feed (no handler yet)
    EASTMONEY = "eastmoney"     # Eastmoney / 东方财富 (no handler yet)
    SINA = "sina"               # Sina Finance quote API
    TENCENT = "tencent"         # Tencent Finance
    YAHOO = "yahoo"             # Yahoo Finance (no handler yet)
    CUSTOM = "custom"           # User-supplied custom source


class DataQuality(Enum):
    """Data quality grade assigned by ``DataCollector._assess_data_quality``.

    Note: members carry string values, so they have no inherent ordering;
    ranking must be done with an explicit mapping, not by comparing values.
    """
    EXCELLENT = "excellent"     # excellent (score >= 90)
    GOOD = "good"              # good (score >= 75)
    FAIR = "fair"              # fair (score >= 60)
    POOR = "poor"              # poor (score >= 40)
    INVALID = "invalid"        # invalid (score < 40)


@dataclass
class CollectionConfig:
    """Configuration options for ``DataCollector``."""
    symbols: List[str] = field(default_factory=list)  # symbols polled each cycle
    data_sources: List[DataSource] = field(default_factory=lambda: [DataSource.TUSHARE])  # sources queried per symbol
    collection_interval: float = 1.0  # delay between collection cycles (seconds)
    retry_count: int = 3  # NOTE(review): not referenced by DataCollector in this file -- confirm intent
    timeout: float = 10.0  # per-request HTTP timeout (seconds)
    enable_cache: bool = True  # serve cached points while still fresh
    cache_ttl: int = 60  # cache time-to-live (seconds)
    quality_threshold: DataQuality = DataQuality.FAIR  # NOTE(review): not enforced anywhere visible -- confirm
    max_concurrent: int = 10  # max concurrent source requests (semaphore size)


@dataclass
class DataPoint:
    """A single quote sampled from one data source at one instant."""
    symbol: str
    timestamp: datetime
    source: DataSource
    data: Dict[str, Any]
    quality: DataQuality = DataQuality.GOOD
    latency: float = 0.0  # round-trip latency in milliseconds

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this point into a JSON-friendly dict (enums -> values)."""
        serialized: Dict[str, Any] = {
            'symbol': self.symbol,
            'timestamp': self.timestamp.isoformat(),
            'source': self.source.value,
            'data': self.data,
            'quality': self.quality.value,
            'latency': self.latency,
        }
        return serialized


@dataclass
class CollectionStats:
    """Aggregate counters describing collection activity."""
    total_requests: int = 0
    successful_requests: int = 0
    failed_requests: int = 0
    average_latency: float = 0.0
    data_quality_distribution: Dict[DataQuality, int] = field(default_factory=dict)
    source_stats: Dict[DataSource, Dict[str, int]] = field(default_factory=dict)
    last_collection_time: Optional[datetime] = None

    def success_rate(self) -> float:
        """Fraction of requests that succeeded; 0.0 when nothing was requested."""
        total = self.total_requests
        return self.successful_requests / total if total else 0.0

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the statistics into a JSON-friendly dict (enums -> values)."""
        quality_dist: Dict[str, int] = {}
        for quality, count in self.data_quality_distribution.items():
            quality_dist[quality.value] = count

        per_source: Dict[str, Dict[str, int]] = {}
        for src, stats in self.source_stats.items():
            per_source[src.value] = stats

        last_time = self.last_collection_time
        return {
            'total_requests': self.total_requests,
            'successful_requests': self.successful_requests,
            'failed_requests': self.failed_requests,
            'success_rate': self.success_rate(),
            'average_latency': self.average_latency,
            'data_quality_distribution': quality_dist,
            'source_stats': per_source,
            'last_collection_time': last_time.isoformat() if last_time is not None else None,
        }


class DataCollector:
    """Multi-source market data collector.

    Periodically polls the configured symbols across one or more data
    sources, scores each response for quality, caches points with a TTL,
    and maintains aggregate success/latency statistics.
    """

    # Ordinal ranking of quality levels (higher is better). DataQuality
    # members carry plain string values, so comparing ``.value`` strings
    # would order them alphabetically (excellent < fair < good < invalid
    # < poor), not by actual quality -- use this table instead.
    _QUALITY_RANK = {
        DataQuality.INVALID: 0,
        DataQuality.POOR: 1,
        DataQuality.FAIR: 2,
        DataQuality.GOOD: 3,
        DataQuality.EXCELLENT: 4,
    }

    def __init__(self, config: Optional[CollectionConfig] = None):
        """Initialize the collector; uses a default config when none is given."""
        self.config = config or CollectionConfig()
        self.logger = logging.getLogger(__name__)

        # Data storage: symbol -> {source value: latest DataPoint}
        self.data_cache: Dict[str, Dict[str, DataPoint]] = {}
        self.collection_stats = CollectionStats()

        # Dispatch table: source -> collection handler coroutine.
        # WIND / EASTMONEY / YAHOO have no handler and are silently skipped.
        self.source_handlers: Dict[DataSource, Callable] = {
            DataSource.TUSHARE: self._collect_from_tushare,
            DataSource.AKSHARE: self._collect_from_akshare,
            DataSource.SINA: self._collect_from_sina,
            DataSource.TENCENT: self._collect_from_tencent,
            DataSource.CUSTOM: self._collect_from_custom
        }

        # Runtime state. NOTE(review): the semaphore/lock are created here,
        # possibly before any event loop runs -- fine on Python 3.10+,
        # where asyncio primitives bind their loop lazily.
        self.is_running = False
        self.collection_task: Optional[asyncio.Task] = None
        self.semaphore = asyncio.Semaphore(self.config.max_concurrent)
        self._lock = asyncio.Lock()

        # Per-request HTTP timeout (seconds).
        self.http_timeout = self.config.timeout

    async def start(self):
        """Start the background collection loop (no-op if already running)."""
        if self.is_running:
            return

        self.is_running = True
        self.collection_task = asyncio.create_task(self._collection_loop())
        self.logger.info("数据收集器已启动")

    async def stop(self):
        """Cancel the background loop and wait for it to exit (no-op if stopped)."""
        if not self.is_running:
            return

        self.is_running = False
        if self.collection_task:
            self.collection_task.cancel()
            try:
                await self.collection_task
            except asyncio.CancelledError:
                pass  # expected when the loop is cancelled

        self.logger.info("数据收集器已停止")

    async def collect_data(self, symbol: str, sources: Optional[List[DataSource]] = None) -> Dict[DataSource, Optional[DataPoint]]:
        """Collect data for one symbol from the given (or configured) sources.

        Returns a mapping of source -> DataPoint, with None for sources
        that failed; sources without a registered handler are omitted.
        """
        if sources is None:
            sources = self.config.data_sources

        results = {}
        tasks = []

        # Fan out one task per source that has a registered handler.
        for source in sources:
            if source in self.source_handlers:
                task = asyncio.create_task(self._collect_single_source(symbol, source))
                tasks.append((source, task))

        # Await each task; a failure in one source never aborts the others.
        for source, task in tasks:
            try:
                result = await task
                results[source] = result
            except Exception as e:
                self.logger.error(f"收集数据失败 {symbol} from {source.value}: {e}")
                results[source] = None

        return results

    async def _collect_single_source(self, symbol: str, source: DataSource) -> Optional[DataPoint]:
        """Fetch one symbol from one source, honoring cache and concurrency limit.

        Returns the (possibly cached) DataPoint, or None on failure.
        Cache hits bypass the statistics counters.
        """
        async with self.semaphore:
            start_time = time.time()

            try:
                # Serve from cache when the entry is still within its TTL.
                if self.config.enable_cache and await self._check_cache(symbol, source):
                    cached_data = self.data_cache[symbol][source.value]
                    self.logger.debug(f"使用缓存数据: {symbol} from {source.value}")
                    return cached_data

                # Dispatch to the per-source handler.
                handler = self.source_handlers.get(source)
                if not handler:
                    self.logger.warning(f"未找到数据源处理器: {source.value}")
                    return None

                data = await handler(symbol)
                if not data:
                    return None

                # Round-trip latency in milliseconds.
                latency = (time.time() - start_time) * 1000

                # Grade the payload before storing it.
                quality = await self._assess_data_quality(data, source)

                data_point = DataPoint(
                    symbol=symbol,
                    timestamp=datetime.now(),
                    source=source,
                    data=data,
                    quality=quality,
                    latency=latency
                )

                if self.config.enable_cache:
                    await self._cache_data(data_point)

                await self._update_stats(data_point, success=True)

                return data_point

            except Exception as e:
                self.logger.error(f"收集数据异常 {symbol} from {source.value}: {e}")
                await self._update_stats(None, success=False, source=source)
                return None

    async def _collect_from_tushare(self, symbol: str) -> Optional[Dict[str, Any]]:
        """Collect data from Tushare (simulated).

        Real usage requires the ``tushare`` package and an API token; this
        stub derives deterministic-per-process values from ``hash(symbol)``.
        """
        try:
            data = {
                'symbol': symbol,
                'price': float(Decimal('100.0') + Decimal(str(hash(symbol) % 100)) / Decimal('10')),
                'volume': hash(symbol) % 1000000,
                'timestamp': datetime.now().isoformat(),
                'source': 'tushare'
            }

            # Simulate network latency.
            await asyncio.sleep(0.1)

            return data

        except Exception as e:
            self.logger.error(f"Tushare数据收集失败 {symbol}: {e}")
            return None

    async def _collect_from_akshare(self, symbol: str) -> Optional[Dict[str, Any]]:
        """Collect data from AKShare (simulated, same scheme as Tushare stub)."""
        try:
            data = {
                'symbol': symbol,
                'price': float(Decimal('95.0') + Decimal(str(hash(symbol) % 50)) / Decimal('10')),
                'volume': hash(symbol) % 800000,
                'timestamp': datetime.now().isoformat(),
                'source': 'akshare'
            }

            await asyncio.sleep(0.15)
            return data

        except Exception as e:
            self.logger.error(f"AKShare数据收集失败 {symbol}: {e}")
            return None

    async def _collect_from_sina(self, symbol: str) -> Optional[Dict[str, Any]]:
        """Collect a live quote from the Sina Finance HTTP API.

        The blocking urllib request runs in the default executor so the
        event loop is not stalled. Returns None on any parse/network error.
        """
        try:
            url = f"http://hq.sinajs.cn/list={symbol}"

            def _sync_request():
                # Runs in a worker thread; must not touch the event loop.
                try:
                    req = urllib.request.Request(url)
                    with urllib.request.urlopen(req, timeout=self.http_timeout) as response:
                        if response.status == 200:
                            text = response.read().decode('utf-8')
                            # Sina format: var hq_str_XXX="name,open,prev,price,...";
                            if 'var hq_str_' in text:
                                data_str = text.split('"')[1]
                                fields = data_str.split(',')

                                if len(fields) >= 4:
                                    return {
                                        'symbol': symbol,
                                        'price': float(fields[3]) if fields[3] else 0.0,
                                        'volume': int(fields[8]) if len(fields) > 8 and fields[8] else 0,
                                        'timestamp': datetime.now().isoformat(),
                                        'source': 'sina'
                                    }
                    return None
                except Exception:
                    # Best-effort: any failure is reported as "no data".
                    return None

            # get_running_loop() is the correct call inside a coroutine
            # (get_event_loop() is deprecated here since 3.10).
            loop = asyncio.get_running_loop()
            result = await loop.run_in_executor(None, _sync_request)
            return result

        except Exception as e:
            self.logger.error(f"新浪财经数据收集失败 {symbol}: {e}")
            return None

    async def _collect_from_tencent(self, symbol: str) -> Optional[Dict[str, Any]]:
        """Collect data from Tencent Finance (simulated)."""
        try:
            data = {
                'symbol': symbol,
                'price': float(Decimal('98.0') + Decimal(str(hash(symbol) % 30)) / Decimal('10')),
                'volume': hash(symbol) % 600000,
                'timestamp': datetime.now().isoformat(),
                'source': 'tencent'
            }

            await asyncio.sleep(0.12)
            return data

        except Exception as e:
            self.logger.error(f"腾讯财经数据收集失败 {symbol}: {e}")
            return None

    async def _collect_from_custom(self, symbol: str) -> Optional[Dict[str, Any]]:
        """Collect data from a user-defined source (simulated placeholder)."""
        try:
            data = {
                'symbol': symbol,
                'price': float(Decimal('102.0') + Decimal(str(hash(symbol) % 20)) / Decimal('10')),
                'volume': hash(symbol) % 500000,
                'timestamp': datetime.now().isoformat(),
                'source': 'custom'
            }

            return data

        except Exception as e:
            self.logger.error(f"自定义数据源收集失败 {symbol}: {e}")
            return None

    async def _assess_data_quality(self, data: Dict[str, Any], source: DataSource) -> DataQuality:
        """Score a payload 0-100 and map the score to a DataQuality grade.

        Deductions: missing required field -30 each; non-positive price -20;
        non-numeric price -25; negative volume -15. The score is then scaled
        by a per-source trust weight. Returns POOR on assessment errors.
        """
        try:
            score = 100

            # Required fields. (Named field_name to avoid shadowing the
            # module-level dataclasses.field import.)
            required_fields = ['symbol', 'price', 'timestamp']
            for field_name in required_fields:
                if field_name not in data or data[field_name] is None:
                    score -= 30

            # Price sanity.
            if 'price' in data:
                price = data['price']
                if isinstance(price, (int, float)) and price <= 0:
                    score -= 20
                elif not isinstance(price, (int, float)):
                    score -= 25

            # Volume sanity.
            if 'volume' in data:
                volume = data['volume']
                if isinstance(volume, (int, float)) and volume < 0:
                    score -= 15

            # Trust weight per source; unknown sources default to 0.5.
            source_weights = {
                DataSource.TUSHARE: 1.0,
                DataSource.WIND: 1.0,
                DataSource.AKSHARE: 0.9,
                DataSource.SINA: 0.8,
                DataSource.TENCENT: 0.8,
                DataSource.CUSTOM: 0.7
            }

            score = int(score * source_weights.get(source, 0.5))

            # Map the weighted score onto the grade ladder.
            if score >= 90:
                return DataQuality.EXCELLENT
            elif score >= 75:
                return DataQuality.GOOD
            elif score >= 60:
                return DataQuality.FAIR
            elif score >= 40:
                return DataQuality.POOR
            else:
                return DataQuality.INVALID

        except Exception as e:
            self.logger.error(f"数据质量评估失败: {e}")
            return DataQuality.POOR

    async def _check_cache(self, symbol: str, source: DataSource) -> bool:
        """Return True when a cached point exists and is younger than cache_ttl.

        NOTE(review): reads the cache without holding ``_lock``; safe under a
        single event loop since there is no await between check and use.
        """
        if symbol not in self.data_cache:
            return False

        if source.value not in self.data_cache[symbol]:
            return False

        cached_data = self.data_cache[symbol][source.value]
        cache_age = (datetime.now() - cached_data.timestamp).total_seconds()

        return cache_age < self.config.cache_ttl

    async def _cache_data(self, data_point: DataPoint):
        """Store a data point in the cache, replacing any older entry."""
        async with self._lock:
            if data_point.symbol not in self.data_cache:
                self.data_cache[data_point.symbol] = {}

            self.data_cache[data_point.symbol][data_point.source.value] = data_point

    async def _update_stats(self, data_point: Optional[DataPoint], success: bool, source: Optional[DataSource] = None):
        """Update aggregate counters for one request outcome.

        On success, folds the point's latency into the running average and
        bumps quality/source counters; on failure, bumps the per-source
        failure counter when the source is known.
        """
        async with self._lock:
            self.collection_stats.total_requests += 1

            if success and data_point:
                self.collection_stats.successful_requests += 1

                # Incremental running average over successful requests only.
                total_latency = (self.collection_stats.average_latency *
                               (self.collection_stats.successful_requests - 1) +
                               data_point.latency)
                self.collection_stats.average_latency = total_latency / self.collection_stats.successful_requests

                # Quality distribution.
                quality = data_point.quality
                if quality not in self.collection_stats.data_quality_distribution:
                    self.collection_stats.data_quality_distribution[quality] = 0
                self.collection_stats.data_quality_distribution[quality] += 1

                # Per-source success counter.
                source = data_point.source
                if source not in self.collection_stats.source_stats:
                    self.collection_stats.source_stats[source] = {'success': 0, 'failed': 0}
                self.collection_stats.source_stats[source]['success'] += 1

            else:
                self.collection_stats.failed_requests += 1
                if source:
                    if source not in self.collection_stats.source_stats:
                        self.collection_stats.source_stats[source] = {'success': 0, 'failed': 0}
                    self.collection_stats.source_stats[source]['failed'] += 1

            self.collection_stats.last_collection_time = datetime.now()

    async def _collection_loop(self):
        """Background loop: poll every configured symbol each interval.

        Runs until cancelled or ``is_running`` is cleared; unexpected errors
        are logged and the loop retries after a short back-off.
        """
        while self.is_running:
            try:
                if not self.config.symbols:
                    await asyncio.sleep(self.config.collection_interval)
                    continue

                # One collect_data task per symbol, started concurrently.
                tasks = []
                for symbol in self.config.symbols:
                    task = asyncio.create_task(self.collect_data(symbol))
                    tasks.append((symbol, task))

                # Gather outcomes; log per symbol, never abort the loop.
                for symbol, task in tasks:
                    try:
                        results = await task
                        valid_results = {source: data for source, data in results.items() if data}

                        if valid_results:
                            self.logger.debug(f"收集到数据: {symbol}, 数据源: {list(valid_results.keys())}")
                        else:
                            self.logger.warning(f"未收集到有效数据: {symbol}")

                    except Exception as e:
                        self.logger.error(f"收集任务失败 {symbol}: {e}")

                await asyncio.sleep(self.config.collection_interval)

            except asyncio.CancelledError:
                break
            except Exception as e:
                self.logger.error(f"收集循环错误: {e}")
                await asyncio.sleep(1)

    async def get_latest_data(self, symbol: str, source: Optional[DataSource] = None) -> Optional[DataPoint]:
        """Return the cached point for a symbol.

        With ``source`` given, returns that source's entry (or None).
        Otherwise returns the highest-quality cached point, ranked via
        ``_QUALITY_RANK`` (comparing the enums' string values directly would
        sort alphabetically and prefer POOR over EXCELLENT). Points graded
        INVALID are never returned.
        """
        async with self._lock:
            if symbol not in self.data_cache:
                return None

            if source:
                return self.data_cache[symbol].get(source.value)
            else:
                best_data = None
                best_rank = self._QUALITY_RANK[DataQuality.INVALID]

                for data_point in self.data_cache[symbol].values():
                    rank = self._QUALITY_RANK.get(data_point.quality, 0)
                    if rank > best_rank:
                        best_data = data_point
                        best_rank = rank

                return best_data

    async def get_all_data(self, symbol: str) -> Dict[DataSource, DataPoint]:
        """Return every cached point for a symbol, keyed by DataSource enum."""
        async with self._lock:
            if symbol not in self.data_cache:
                return {}

            return {
                DataSource(source): data
                for source, data in self.data_cache[symbol].items()
            }

    async def add_symbol(self, symbol: str):
        """Add a symbol to the polling list (no-op if already present)."""
        if symbol not in self.config.symbols:
            self.config.symbols.append(symbol)
            self.logger.info(f"添加收集标的: {symbol}")

    async def remove_symbol(self, symbol: str):
        """Remove a symbol from the polling list and drop its cache entries."""
        if symbol in self.config.symbols:
            self.config.symbols.remove(symbol)
            async with self._lock:
                self.data_cache.pop(symbol, None)
            self.logger.info(f"移除收集标的: {symbol}")

    async def get_statistics(self) -> Dict[str, Any]:
        """Return collection statistics plus current configuration summary."""
        async with self._lock:
            stats = self.collection_stats.to_dict()
            stats.update({
                'cached_symbols': len(self.data_cache),
                'configured_symbols': len(self.config.symbols),
                'configured_sources': [s.value for s in self.config.data_sources],
                'is_running': self.is_running
            })
            return stats

    async def clear_cache(self):
        """Drop every cached data point."""
        async with self._lock:
            self.data_cache.clear()
            self.logger.info("数据缓存已清理")