"""
Akshare数据同步服务
提供智能数据同步、质量控制和实时更新功能
"""

import asyncio
import logging
import time
from typing import Dict, List, Optional, Any, Callable
from datetime import datetime, timezone, timedelta
from dataclasses import dataclass, field
from enum import Enum
import pandas as pd
import akshare as ak
from contextlib import asynccontextmanager

from .core.config import config
from .core.database import db_manager, DataSyncLog
from .core.cache import cache_manager
from .core.exceptions import DataSyncException, DataQualityException

# Backward-compatibility aliases for the renamed exception classes.
DataSyncError = DataSyncException
DataQualityError = DataQualityException
from .models.stock import Stock, StockPrice, StockInfo
from .models.index import Index, IndexPrice
from .models.fund import Fund, FundPrice, FundInfo
from .models.bond import Bond, BondPrice, BondInfo

logger = logging.getLogger(__name__)


class SyncType(Enum):
    """Frequency class of a synchronization task."""
    REALTIME = "realtime"      # real-time data (~1 minute cadence)
    DAILY = "daily"            # daily-frequency data (~1 hour cadence)
    WEEKLY = "weekly"          # low-frequency data (~1 day cadence)


class DataSource(Enum):
    """Identifies which akshare feed a task pulls from.

    The string value is stored in sync logs and used as a cache-key
    prefix; the ``*_realtime`` / ``*_daily`` suffix convention is relied
    on elsewhere (timeliness thresholds, cache TTL selection).
    """
    STOCK_REALTIME = "stock_realtime"
    STOCK_DAILY = "stock_daily"
    STOCK_INFO = "stock_info"
    INDEX_REALTIME = "index_realtime"
    INDEX_DAILY = "index_daily"
    FUND_REALTIME = "fund_realtime"
    FUND_DAILY = "fund_daily"
    FUND_INFO = "fund_info"
    BOND_REALTIME = "bond_realtime"
    BOND_DAILY = "bond_daily"
    BOND_INFO = "bond_info"


@dataclass
class SyncTask:
    """Configuration plus mutable runtime state for one sync task."""
    data_source: DataSource                               # which feed this task pulls
    sync_type: SyncType                                   # frequency class of the feed
    akshare_func: str                                     # name of the akshare function to call
    params: Dict[str, Any] = field(default_factory=dict)  # kwargs passed to the akshare call
    interval: int = 60  # sync interval in seconds
    enabled: bool = True                                  # disabled tasks are never scheduled
    retry_count: int = 0                                  # consecutive loop failures so far
    last_sync: Optional[datetime] = None                  # UTC time of the last successful sync
    next_sync: Optional[datetime] = None                  # UTC time the next sync is due


@dataclass
class DataQualityMetrics:
    """Scores for the five data-quality dimensions.

    Every dimension is a score in ``[0, 1]``; ``overall_score`` is their
    unweighted mean.
    """
    completeness: float = 0.0   # fraction of non-null cells
    accuracy: float = 0.0       # value plausibility (positive prices, ...)
    consistency: float = 0.0    # internal agreement (e.g. OHLC invariants)
    timeliness: float = 0.0     # freshness of the newest record
    validity: float = 0.0       # format conformance (e.g. symbol pattern)

    @property
    def overall_score(self) -> float:
        """Unweighted mean of the five dimension scores."""
        dimensions = (
            self.completeness,
            self.accuracy,
            self.consistency,
            self.timeliness,
            self.validity,
        )
        return sum(dimensions) / len(dimensions)


class DataQualityChecker:
    """Scores akshare DataFrames on five quality dimensions.

    Each private ``_check_*`` coroutine returns a score in ``[0, 1]``;
    :meth:`check_data_quality` aggregates them into a
    ``DataQualityMetrics`` instance.
    """

    def __init__(self):
        self.logger = logging.getLogger(f"{__name__}.DataQualityChecker")

    async def check_data_quality(self, data: pd.DataFrame,
                                 data_source: "DataSource") -> "DataQualityMetrics":
        """Run all five quality checks on *data*.

        Args:
            data: raw DataFrame returned by an akshare call.
            data_source: source descriptor; selects source-specific rules
                (e.g. the freshness window used by the timeliness check).

        Returns:
            A populated ``DataQualityMetrics``.

        Raises:
            DataQualityError: if any individual check raises; the original
                exception is chained as ``__cause__``.
        """
        metrics = DataQualityMetrics()

        try:
            metrics.completeness = await self._check_completeness(data)
            metrics.accuracy = await self._check_accuracy(data, data_source)
            metrics.consistency = await self._check_consistency(data, data_source)
            metrics.timeliness = await self._check_timeliness(data, data_source)
            metrics.validity = await self._check_validity(data, data_source)

            self.logger.info(f"数据质量检查完成: {data_source.value}, 综合分数: {metrics.overall_score:.2f}")

        except Exception as e:
            self.logger.error(f"数据质量检查失败: {e}")
            # Chain the original exception so the root cause is preserved
            # (the original code discarded it).
            raise DataQualityError(f"数据质量检查失败: {e}") from e

        return metrics

    async def _check_completeness(self, data: pd.DataFrame) -> float:
        """Return the fraction of non-null cells; 0.0 for an empty frame."""
        if data.empty:
            return 0.0

        total_cells = data.size
        non_null_cells = data.count().sum()
        return non_null_cells / total_cells if total_cells > 0 else 0.0

    async def _check_accuracy(self, data: pd.DataFrame, data_source: "DataSource") -> float:
        """Score value plausibility: positive prices, non-negative volumes.

        The score is the product of the per-rule pass ratios, so a frame
        failing several rules is penalised multiplicatively.
        """
        if data.empty:
            return 0.0

        accuracy_score = 1.0
        row_count = len(data)

        # Prices must be strictly positive; prefer 'price', fall back to 'close'.
        if 'price' in data.columns or 'close' in data.columns:
            price_col = 'price' if 'price' in data.columns else 'close'
            accuracy_score *= (data[price_col] > 0).sum() / row_count

        # Trading volume must be non-negative.
        if 'volume' in data.columns:
            accuracy_score *= (data['volume'] >= 0).sum() / row_count

        return accuracy_score

    async def _check_consistency(self, data: pd.DataFrame, data_source: "DataSource") -> float:
        """Score internal agreement of OHLC bars.

        Invariants checked: ``high >= max(open, close)`` and
        ``low <= min(open, close)``. Frames without a full OHLC column
        set score 1.0 by default.
        """
        if data.empty:
            return 0.0

        consistency_score = 1.0

        if all(col in data.columns for col in ['open', 'high', 'low', 'close']):
            valid_high = (data['high'] >= data[['open', 'close']].max(axis=1)).sum()
            valid_low = (data['low'] <= data[['open', 'close']].min(axis=1)).sum()
            consistency_score = (valid_high + valid_low) / (2 * len(data))

        return consistency_score

    async def _check_timeliness(self, data: pd.DataFrame, data_source: "DataSource") -> float:
        """Score freshness of the newest record against a per-source window.

        Returns 1.0 when fresh, 0.5 when stale or unparseable, and 1.0
        when the frame carries no time column at all.
        """
        if data.empty:
            return 0.0

        if 'date' in data.columns or 'timestamp' in data.columns:
            time_col = 'date' if 'date' in data.columns else 'timestamp'
            # NOTE(review): naive local time; tz-aware input data would make
            # the subtraction below raise and fall into the 0.5 branch.
            now = datetime.now()

            # Freshness window depends on the feed's frequency class.
            if data_source.value.endswith('realtime'):
                threshold = timedelta(minutes=5)
            elif data_source.value.endswith('daily'):
                threshold = timedelta(days=1)
            else:
                threshold = timedelta(days=7)

            try:
                latest_time = pd.to_datetime(data[time_col]).max()
                if pd.isna(latest_time):
                    return 0.5

                time_diff = now - latest_time.to_pydatetime()
                return 1.0 if time_diff <= threshold else 0.5
            except Exception:
                # Was a bare `except:`; never mask SystemExit/KeyboardInterrupt.
                return 0.5

        return 1.0

    async def _check_validity(self, data: pd.DataFrame, data_source: "DataSource") -> float:
        """Score format conformance: security codes must be exactly six digits."""
        if data.empty:
            return 0.0

        validity_score = 1.0

        if 'symbol' in data.columns or 'code' in data.columns:
            code_col = 'symbol' if 'symbol' in data.columns else 'code'
            valid_codes = data[code_col].astype(str).str.match(r'^\d{6}$').sum()
            validity_score *= valid_codes / len(data)

        return validity_score


class AkshareDataSyncService:
    """Akshare data synchronization service.

    Owns a set of ``SyncTask`` configurations and, once started, runs one
    asyncio loop per enabled task plus a monitor loop. Each sync cycle
    fetches a DataFrame from akshare, quality-checks it, cleans it,
    persists it via ``db_manager``, refreshes the cache, and records a
    ``DataSyncLog`` row for observability.
    """
    
    def __init__(self):
        self.logger = logging.getLogger(f"{__name__}.AkshareDataSyncService")
        self.quality_checker = DataQualityChecker()
        self.cache = None  # assigned lazily in initialize()
        self.sync_tasks: Dict[str, SyncTask] = {}
        self.running_tasks: Dict[str, asyncio.Task] = {}
        self._initialized = False
        # NOTE(review): creating asyncio.Event() outside a running loop is
        # only safe on Python 3.10+; earlier versions bind the event to the
        # loop current at construction time -- confirm the minimum version.
        self._stop_event = asyncio.Event()
        
        # Build the static task configurations up front.
        self._init_sync_tasks()
    
    def _init_sync_tasks(self):
        """Register the built-in synchronization task configurations."""
        # Stock real-time quotes
        self.sync_tasks["stock_realtime"] = SyncTask(
            data_source=DataSource.STOCK_REALTIME,
            sync_type=SyncType.REALTIME,
            akshare_func="stock_zh_a_spot_em",
            interval=config.data_sync.realtime_interval
        )
        
        # Stock daily bars
        self.sync_tasks["stock_daily"] = SyncTask(
            data_source=DataSource.STOCK_DAILY,
            sync_type=SyncType.DAILY,
            akshare_func="stock_zh_a_hist",
            interval=config.data_sync.daily_interval
        )
        
        # Stock reference/basic info
        self.sync_tasks["stock_info"] = SyncTask(
            data_source=DataSource.STOCK_INFO,
            sync_type=SyncType.WEEKLY,
            akshare_func="stock_info_a_code_name",
            interval=config.data_sync.weekly_interval
        )
        
        # Index real-time quotes
        # NOTE(review): "index_zh_a_hist" looks like a *historical* endpoint
        # paired with a REALTIME schedule -- confirm this is intentional.
        self.sync_tasks["index_realtime"] = SyncTask(
            data_source=DataSource.INDEX_REALTIME,
            sync_type=SyncType.REALTIME,
            akshare_func="index_zh_a_hist",
            interval=config.data_sync.realtime_interval
        )
        
        # Fund (ETF) real-time quotes
        self.sync_tasks["fund_realtime"] = SyncTask(
            data_source=DataSource.FUND_REALTIME,
            sync_type=SyncType.REALTIME,
            akshare_func="fund_etf_spot_em",
            interval=config.data_sync.realtime_interval
        )
        
        # Bond real-time deals
        self.sync_tasks["bond_realtime"] = SyncTask(
            data_source=DataSource.BOND_REALTIME,
            sync_type=SyncType.REALTIME,
            akshare_func="bond_spot_deal",
            interval=config.data_sync.realtime_interval
        )
    
    async def initialize(self):
        """Obtain the cache handle and schedule the first run of each task.

        Idempotent: subsequent calls return immediately.

        Raises:
            DataSyncError: if initialization fails.
        """
        if self._initialized:
            return
        
        try:
            # Named cache partition used by this service.
            self.cache = cache_manager.get_cache("data_sync")
            
            # Schedule every task's first run one interval from now.
            now = datetime.now(timezone.utc)
            for task in self.sync_tasks.values():
                task.next_sync = now + timedelta(seconds=task.interval)
            
            self._initialized = True
            self.logger.info("数据同步服务初始化完成")
            
        except Exception as e:
            self.logger.error(f"数据同步服务初始化失败: {e}")
            raise DataSyncError(f"数据同步服务初始化失败: {e}")
    
    async def start(self):
        """Start one loop per enabled task plus the monitor loop."""
        if not self._initialized:
            await self.initialize()
        
        self.logger.info("启动数据同步服务")
        
        # Spawn a long-running loop for every enabled task.
        for task_name, task in self.sync_tasks.items():
            if task.enabled:
                self.running_tasks[task_name] = asyncio.create_task(
                    self._sync_task_loop(task_name, task)
                )
        
        # Health-check / cache-cleanup watchdog.
        self.running_tasks["monitor"] = asyncio.create_task(self._monitor_loop())
    
    async def stop(self):
        """Signal all loops to stop, cancel them and wait for completion.

        NOTE(review): the stop event is never cleared, so a stopped
        instance cannot be start()-ed again -- confirm whether restart
        should be supported.
        """
        self.logger.info("停止数据同步服务")
        self._stop_event.set()
        
        # Cancel every running loop.
        for task in self.running_tasks.values():
            task.cancel()
        
        # Drain them; return_exceptions=True swallows the CancelledErrors.
        if self.running_tasks:
            await asyncio.gather(*self.running_tasks.values(), return_exceptions=True)
        
        self.running_tasks.clear()
    
    async def _sync_task_loop(self, task_name: str, task: SyncTask):
        """Poll loop for one task: execute it whenever ``next_sync`` is due."""
        while not self._stop_event.is_set():
            try:
                now = datetime.now(timezone.utc)
                
                # Due? Execute, then reschedule one interval ahead.
                if task.next_sync and now >= task.next_sync:
                    await self._execute_sync_task(task_name, task)
                    
                    # Only reached on success: reschedule and reset retries.
                    task.next_sync = now + timedelta(seconds=task.interval)
                    task.last_sync = now
                    task.retry_count = 0
                
                # Coarse 10-second polling granularity between due checks.
                await asyncio.sleep(10)
                
            except asyncio.CancelledError:
                break
            except Exception as e:
                self.logger.error(f"同步任务循环错误 {task_name}: {e}")
                task.retry_count += 1
                
                # After too many consecutive failures, back off to twice the
                # normal interval before trying again.
                if task.retry_count >= config.data_sync.max_retries:
                    task.next_sync = datetime.now(timezone.utc) + timedelta(
                        seconds=task.interval * 2
                    )
                    task.retry_count = 0
                
                await asyncio.sleep(config.data_sync.retry_delay)
    
    async def _execute_sync_task(self, task_name: str, task: SyncTask):
        """Run one full sync cycle: fetch, check, clean, persist, cache, log.

        Raises:
            DataSyncError: on fetch/clean/save failures or empty data.
            DataQualityError: when the quality score is below threshold.
        """
        start_time = time.time()
        sync_log = None
        
        try:
            self.logger.info(f"开始执行同步任务: {task_name}")
            
            # Record a "running" log row before doing any work.
            # NOTE(review): sync_log is created in this session but mutated
            # and committed later in *different* sessions without being
            # re-attached; depending on db_manager's session semantics those
            # later commits may not persist -- verify.
            async with db_manager.get_session() as session:
                sync_log = DataSyncLog(
                    data_source=task.data_source.value,
                    sync_type=task.sync_type.value,
                    status="running"
                )
                session.add(sync_log)
                await session.commit()
            
            # Fetch the raw DataFrame from akshare.
            data = await self._fetch_data(task)
            
            if data is None or data.empty:
                raise DataSyncError("获取到空数据")
            
            # Optional quality gate.
            if config.data_sync.quality_check_enabled:
                quality_metrics = await self.quality_checker.check_data_quality(
                    data, task.data_source
                )
                
                if quality_metrics.overall_score < config.data_sync.quality_threshold:
                    raise DataQualityError(
                        f"数据质量不达标: {quality_metrics.overall_score:.2f}"
                    )
            
            # Clean and normalize the raw frame.
            cleaned_data = await self._clean_and_transform_data(data, task.data_source)
            
            # Persist to the database.
            record_count = await self._save_data(cleaned_data, task.data_source)
            
            # Refresh the latest-data cache entry.
            await self._update_cache(cleaned_data, task.data_source)
            
            # Mark the log row as successful.
            sync_duration = time.time() - start_time
            async with db_manager.get_session() as session:
                if sync_log:
                    sync_log.status = "success"
                    sync_log.record_count = record_count
                    sync_log.sync_duration = sync_duration
                    # NOTE(review): quality_metrics is only bound when the
                    # quality check ran; if the config flag flips mid-cycle
                    # this expression raises NameError -- confirm acceptable.
                    sync_log.sync_metadata = {
                        "data_quality": quality_metrics.__dict__ if config.data_sync.quality_check_enabled else None,
                        "task_config": {
                            "akshare_func": task.akshare_func,
                            "params": task.params
                        }
                    }
                    await session.commit()
            
            self.logger.info(
                f"同步任务完成: {task_name}, 记录数: {record_count}, "
                f"耗时: {sync_duration:.2f}秒"
            )
            
        except Exception as e:
            sync_duration = time.time() - start_time
            self.logger.error(f"同步任务失败 {task_name}: {e}")
            
            # Best-effort: mark the log row as failed, then re-raise so the
            # task loop applies its retry/backoff policy.
            async with db_manager.get_session() as session:
                if sync_log:
                    sync_log.status = "failed"
                    sync_log.error_message = str(e)
                    sync_log.sync_duration = sync_duration
                    await session.commit()
            
            raise
    
    async def _fetch_data(self, task: SyncTask) -> Optional[pd.DataFrame]:
        """Call the configured akshare function and return its DataFrame.

        Returns None if akshare returns something other than a DataFrame.

        Raises:
            DataSyncError: if the akshare call itself fails.

        NOTE(review): the akshare call is synchronous and runs directly on
        the event loop thread, blocking all other tasks while it performs
        network I/O -- consider loop.run_in_executor.
        """
        try:
            # Resolve the akshare function by name.
            akshare_func = getattr(ak, task.akshare_func)
            
            # Invoke with configured params, if any.
            if task.params:
                data = akshare_func(**task.params)
            else:
                data = akshare_func()
            
            if isinstance(data, pd.DataFrame):
                return data
            else:
                self.logger.warning(f"akshare返回非DataFrame数据: {type(data)}")
                return None
                
        except Exception as e:
            self.logger.error(f"从akshare获取数据失败: {e}")
            raise DataSyncError(f"从akshare获取数据失败: {e}")
    
    async def _clean_and_transform_data(self, data: pd.DataFrame, 
                                       data_source: DataSource) -> pd.DataFrame:
        """Apply generic cleaning, then source-specific cleaning.

        Generic steps: drop all-null rows and exact duplicate rows.
        The input frame is copied and never mutated.

        Raises:
            DataSyncError: if any cleaning step fails.
        """
        try:
            # Copy to avoid mutating the caller's frame.
            cleaned_data = data.copy()
            
            # Generic cleaning:
            # drop rows that are entirely null...
            cleaned_data = cleaned_data.dropna(how='all')
            
            # ...and exact duplicate rows.
            cleaned_data = cleaned_data.drop_duplicates()
            
            # Dispatch to the source-specific cleaner.
            if data_source in [DataSource.STOCK_REALTIME, DataSource.STOCK_DAILY]:
                cleaned_data = await self._clean_stock_data(cleaned_data)
            elif data_source in [DataSource.INDEX_REALTIME]:
                cleaned_data = await self._clean_index_data(cleaned_data)
            elif data_source in [DataSource.FUND_REALTIME]:
                cleaned_data = await self._clean_fund_data(cleaned_data)
            elif data_source in [DataSource.BOND_REALTIME]:
                cleaned_data = await self._clean_bond_data(cleaned_data)
            
            return cleaned_data
            
        except Exception as e:
            self.logger.error(f"数据清洗失败: {e}")
            raise DataSyncError(f"数据清洗失败: {e}")
    
    async def _clean_stock_data(self, data: pd.DataFrame) -> pd.DataFrame:
        """Clean stock data: rename Chinese columns, coerce numerics, drop invalid rows."""
        # Map akshare's Chinese column headers to canonical English names.
        column_mapping = {
            '代码': 'symbol',
            '名称': 'name',
            '最新价': 'price',
            '涨跌幅': 'change_pct',
            '涨跌额': 'change',
            '成交量': 'volume',
            '成交额': 'amount',
            '振幅': 'amplitude',
            '最高': 'high',
            '最低': 'low',
            '今开': 'open',
            '昨收': 'prev_close'
        }
        
        # Rename only the columns that are actually present.
        for old_name, new_name in column_mapping.items():
            if old_name in data.columns:
                data = data.rename(columns={old_name: new_name})
        
        # Coerce numeric columns; unparseable values become NaN.
        numeric_columns = ['price', 'change_pct', 'change', 'volume', 'amount', 
                          'amplitude', 'high', 'low', 'open', 'prev_close']
        
        for col in numeric_columns:
            if col in data.columns:
                data[col] = pd.to_numeric(data[col], errors='coerce')
        
        # Drop rows without a positive price (also removes NaN prices).
        if 'price' in data.columns:
            data = data[data['price'] > 0]
        
        return data
    
    async def _clean_index_data(self, data: pd.DataFrame) -> pd.DataFrame:
        """Clean index data. Placeholder: currently returns the frame unchanged."""
        # TODO: apply cleaning analogous to _clean_stock_data.
        return data
    
    async def _clean_fund_data(self, data: pd.DataFrame) -> pd.DataFrame:
        """Clean fund data. Placeholder: currently returns the frame unchanged."""
        # TODO: apply cleaning analogous to _clean_stock_data.
        return data
    
    async def _clean_bond_data(self, data: pd.DataFrame) -> pd.DataFrame:
        """Clean bond data. Placeholder: currently returns the frame unchanged."""
        # TODO: apply cleaning analogous to _clean_stock_data.
        return data
    
    async def _save_data(self, data: pd.DataFrame, data_source: DataSource) -> int:
        """Persist *data* and return the number of records written.

        Only stock sources are implemented; all other sources currently
        fall through and return 0 without saving anything.

        Raises:
            DataSyncError: if the underlying save fails.
        """
        try:
            # Dispatch on data source.
            if data_source == DataSource.STOCK_REALTIME:
                return await self._save_stock_realtime_data(data)
            elif data_source == DataSource.STOCK_DAILY:
                return await self._save_stock_daily_data(data)
            elif data_source == DataSource.STOCK_INFO:
                return await self._save_stock_info_data(data)
            # Saving logic for the remaining data sources is not implemented yet.
            
            return 0
            
        except Exception as e:
            self.logger.error(f"保存数据失败: {e}")
            raise DataSyncError(f"保存数据失败: {e}")
    
    async def _save_stock_realtime_data(self, data: pd.DataFrame) -> int:
        """Bulk-insert real-time stock rows into 'stock_prices'.

        All rows in one batch share the same UTC timestamp. Returns the
        inserted record count as reported by db_manager.bulk_insert.
        """
        records = []
        now = datetime.now(timezone.utc)
        
        for _, row in data.iterrows():
            record = {
                'symbol': row.get('symbol'),
                'price': row.get('price'),
                'change_pct': row.get('change_pct'),
                'change': row.get('change'),
                'volume': row.get('volume'),
                'amount': row.get('amount'),
                'high': row.get('high'),
                'low': row.get('low'),
                'open': row.get('open'),
                'prev_close': row.get('prev_close'),
                'timestamp': now,
                'created_at': now,
                'updated_at': now
            }
            records.append(record)
        
        # 'ignore' conflict mode: presumably skips duplicate keys -- verify
        # against db_manager.bulk_insert's contract.
        return await db_manager.bulk_insert('stock_prices', records, 'ignore')
    
    async def _save_stock_daily_data(self, data: pd.DataFrame) -> int:
        """Save daily stock bars. Placeholder: currently saves nothing and returns 0."""
        # TODO: implement, analogous to _save_stock_realtime_data.
        return 0
    
    async def _save_stock_info_data(self, data: pd.DataFrame) -> int:
        """Save stock reference info. Placeholder: currently saves nothing and returns 0."""
        # TODO: implement, analogous to _save_stock_realtime_data.
        return 0
    
    async def _update_cache(self, data: pd.DataFrame, data_source: DataSource):
        """Store the latest cleaned frame under '<source>_latest'.

        Failures are logged and swallowed: caching is best-effort and must
        not fail the sync cycle.
        """
        try:
            cache_key = f"{data_source.value}_latest"
            
            # Serialize the DataFrame into a cache-friendly structure.
            cache_data = {
                'data': data.to_dict('records'),
                'timestamp': datetime.now(timezone.utc).isoformat(),
                'count': len(data)
            }
            
            # TTL: 5 minutes for realtime feeds, 1 hour for everything else.
            ttl = 300 if data_source.value.endswith('realtime') else 3600
            await self.cache.set(cache_key, cache_data, ttl)
            
        except Exception as e:
            self.logger.warning(f"更新缓存失败: {e}")
    
    async def _monitor_loop(self):
        """Watchdog loop: check task health and clean the cache every 5 minutes."""
        while not self._stop_event.is_set():
            try:
                # Detect stalled tasks.
                await self._check_task_health()
                
                # Evict stale cache entries (currently a no-op).
                await self._cleanup_cache()
                
                # Check every 5 minutes.
                await asyncio.sleep(300)
                
            except asyncio.CancelledError:
                break
            except Exception as e:
                self.logger.error(f"监控循环错误: {e}")
                await asyncio.sleep(60)
    
    async def _check_task_health(self):
        """Warn about any enabled task whose last sync is older than 3 intervals."""
        now = datetime.now(timezone.utc)
        
        for task_name, task in self.sync_tasks.items():
            if not task.enabled:
                continue
            
            # A task more than 3x its interval overdue is considered stalled.
            if task.last_sync:
                time_since_last = now - task.last_sync
                max_interval = timedelta(seconds=task.interval * 3)
                
                if time_since_last > max_interval:
                    self.logger.warning(
                        f"同步任务可能异常: {task_name}, "
                        f"上次同步: {task.last_sync}, "
                        f"间隔: {time_since_last}"
                    )
    
    async def _cleanup_cache(self):
        """Evict expired cache entries. Placeholder: no-op for now."""
        try:
            # TODO: implement cache eviction.
            pass
        except Exception as e:
            self.logger.warning(f"缓存清理失败: {e}")
    
    async def get_sync_status(self) -> Dict[str, Any]:
        """Return a snapshot of service state, per-task schedule, and 24h statistics."""
        status = {
            'service_status': 'running' if not self._stop_event.is_set() else 'stopped',
            'tasks': {},
            'statistics': await self._get_sync_statistics()
        }
        
        for task_name, task in self.sync_tasks.items():
            status['tasks'][task_name] = {
                'enabled': task.enabled,
                'last_sync': task.last_sync.isoformat() if task.last_sync else None,
                'next_sync': task.next_sync.isoformat() if task.next_sync else None,
                'retry_count': task.retry_count,
                'data_source': task.data_source.value,
                'sync_type': task.sync_type.value
            }
        
        return status
    
    async def _get_sync_statistics(self) -> Dict[str, Any]:
        """Aggregate the last 24h of sync logs per (data_source, status).

        Returns {} on any error (statistics are non-critical).

        NOTE(review): NOW() - INTERVAL '24 hours' is PostgreSQL syntax;
        this query will fail on other databases -- confirm the backend.
        """
        try:
            query = """
                SELECT 
                    data_source,
                    status,
                    COUNT(*) as count,
                    AVG(sync_duration) as avg_duration,
                    SUM(record_count) as total_records
                FROM data_sync_logs 
                WHERE created_at >= NOW() - INTERVAL '24 hours'
                GROUP BY data_source, status
                ORDER BY data_source, status
            """
            
            results = await db_manager.execute_query(query)
            
            # Pivot rows into {source: {status: {...}}}.
            statistics = {}
            for record in results:
                source = record['data_source']
                if source not in statistics:
                    statistics[source] = {}
                
                statistics[source][record['status']] = {
                    'count': record['count'],
                    'avg_duration': float(record['avg_duration']) if record['avg_duration'] else 0,
                    'total_records': record['total_records'] or 0
                }
            
            return statistics
            
        except Exception as e:
            self.logger.error(f"获取同步统计失败: {e}")
            return {}


# Module-level singleton instance of the data sync service.
data_sync_service = AkshareDataSyncService()


async def init_data_sync_service():
    """Module-level convenience wrapper: initialize the global service instance."""
    await data_sync_service.initialize()


async def start_data_sync_service():
    """Module-level convenience wrapper: start the global service instance."""
    await data_sync_service.start()


async def stop_data_sync_service():
    """Module-level convenience wrapper: stop the global service instance."""
    await data_sync_service.stop()