"""
数据统计分析服务
实现人流量统计、热力图生成、趋势分析、报表生成等功能
"""
import asyncio
import logging
import numpy as np
import pandas as pd
from datetime import datetime, timedelta, date
from typing import List, Dict, Optional, Tuple, Any, Union
from uuid import uuid4
import time
from collections import defaultdict, deque
import json
import math
from scipy import stats
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import DBSCAN

from schemas.data_analysis import (
    TimeSeries, DataPoint, StatisticalSummary, HeatmapData, TrendAnalysis,
    ChartData, ChartConfig, Report, ReportSection, DataQuery, Dashboard,
    TimeGranularity, ChartType, AggregationType, ReportType, PerformanceMetrics
)
from schemas.person_detection import PersonDetectionResult
from schemas.pose_detection import PoseAnalysisResult, BehaviorAlert
from core.config import get_settings

# Module-level logger and application settings (settings loaded once at import).
logger = logging.getLogger(__name__)
settings = get_settings()  # NOTE(review): unused in this chunk — presumably used elsewhere in the file


class DataCollector:
    """In-memory collector for detection, pose, alert and performance events.

    Events are stored in bounded deques (oldest entries are evicted once the
    maxlen is reached). A rolling one-hour summary is cached in
    ``real_time_stats`` and refreshed at most once per minute.
    """
    def __init__(self):
        # Bounded event buffers; deque(maxlen=...) drops the oldest entry on overflow.
        self.person_detection_data: deque = deque(maxlen=10000)
        self.pose_analysis_data: deque = deque(maxlen=10000)
        self.behavior_alert_data: deque = deque(maxlen=5000)
        self.performance_metrics: deque = deque(maxlen=5000)

        # Cached real-time statistics and the timestamp of the last refresh.
        self.real_time_stats: Dict[str, Any] = {}
        self.last_update = datetime.now()

    def add_person_detection_data(self, data: "PersonDetectionResult"):
        """Record one person-detection result and refresh the rolling stats."""
        self.person_detection_data.append({
            'timestamp': data.timestamp,
            'camera_id': data.camera_id,
            'total_count': data.total_count,
            'confidence_threshold': data.confidence_threshold,
            'processing_time': data.processing_time,
            'persons': data.persons
        })
        self._update_real_time_stats()

    def add_pose_analysis_data(self, data: "PoseAnalysisResult"):
        """Record one pose-analysis result and refresh the rolling stats."""
        self.pose_analysis_data.append({
            'timestamp': data.timestamp,
            'camera_id': data.camera_id,
            'total_persons': data.total_persons,
            'processing_time': data.processing_time,
            'poses': len(data.poses),
            'actions': len(data.actions),
            'behavior_patterns': len(data.behavior_patterns)
        })
        self._update_real_time_stats()

    def add_behavior_alert_data(self, data: "BehaviorAlert"):
        """Record one behavior alert and refresh the rolling stats."""
        self.behavior_alert_data.append({
            'timestamp': data.timestamp,
            'camera_id': data.camera_id,
            'behavior_type': data.behavior_type,
            'severity': data.severity.value,
            'confidence': data.confidence,
            'person_ids': data.person_ids,
            'resolved': data.resolved
        })
        self._update_real_time_stats()

    def add_performance_metric(self, metric: "PerformanceMetrics"):
        """Record one performance metric sample (does not touch the stats cache)."""
        self.performance_metrics.append({
            'timestamp': metric.timestamp,
            'name': metric.name,
            'value': metric.value,
            'unit': metric.unit,
            'tags': metric.tags
        })

    def _update_real_time_stats(self):
        """Recompute the one-hour rolling statistics, at most once per minute."""
        current_time = datetime.now()
        # Throttle: refresh at most once every 60 seconds.
        if (current_time - self.last_update).total_seconds() < 60:
            return

        one_hour_ago = current_time - timedelta(hours=1)

        # Person-detection statistics over the last hour.
        recent_detections = [
            d for d in self.person_detection_data
            if d['timestamp'] >= one_hour_ago
        ]

        if recent_detections:
            total_persons = sum(d['total_count'] for d in recent_detections)
            self.real_time_stats['person_detection'] = {
                'total_detections': len(recent_detections),
                'total_persons': total_persons,
                'average_persons': total_persons / len(recent_detections),
                'max_persons': max(d['total_count'] for d in recent_detections),
                'avg_processing_time': np.mean([d['processing_time'] for d in recent_detections])
            }
        else:
            # Bug fix: without this, hour-old figures were served forever once
            # all buffered detections aged out of the window.
            self.real_time_stats.pop('person_detection', None)

        # Alert statistics over the last hour.
        recent_alerts = [
            a for a in self.behavior_alert_data
            if a['timestamp'] >= one_hour_ago
        ]

        if recent_alerts:
            alert_counts = defaultdict(int)
            for alert in recent_alerts:
                alert_counts[alert['severity']] += 1

            self.real_time_stats['alerts'] = {
                'total_alerts': len(recent_alerts),
                'severity_distribution': dict(alert_counts),
                'unresolved_alerts': len([a for a in recent_alerts if not a['resolved']])
            }
        else:
            # Same staleness fix for the alert summary.
            self.real_time_stats.pop('alerts', None)

        self.last_update = current_time

    def get_real_time_stats(self) -> Dict[str, Any]:
        """Return a shallow copy of the cached real-time statistics."""
        return self.real_time_stats.copy()


class StatisticalAnalyzer:
    """Stateless helpers for summarising, bucketing and screening metric data."""
    def __init__(self):
        pass

    def calculate_summary(self, values: List[float]) -> StatisticalSummary:
        """Compute descriptive statistics; an empty input yields an all-zero summary."""
        if not values:
            return StatisticalSummary(
                count=0, sum=0, mean=0, median=0, std=0,
                min=0, max=0, percentile_25=0, percentile_75=0
            )

        arr = np.array(values)
        return StatisticalSummary(
            count=len(values),
            sum=float(arr.sum()),
            mean=float(arr.mean()),
            median=float(np.median(arr)),
            std=float(arr.std()),
            min=float(arr.min()),
            max=float(arr.max()),
            percentile_25=float(np.percentile(arr, 25)),
            percentile_75=float(np.percentile(arr, 75))
        )

    def create_time_series(
        self, 
        data: List[Dict[str, Any]], 
        value_field: str,
        time_field: str = 'timestamp',
        granularity: TimeGranularity = TimeGranularity.HOUR,
        aggregation: AggregationType = AggregationType.COUNT
    ) -> TimeSeries:
        """Bucket *data* by *granularity* and aggregate *value_field* per bucket."""
        if not data:
            return TimeSeries(
                series_id=str(uuid4()),
                name="Empty Series",
                data_points=[],
                granularity=granularity,
                start_time=datetime.now(),
                end_time=datetime.now()
            )

        buckets = self._group_by_time(data, time_field, granularity)

        # Map each aggregation mode to its reducer; COUNT doubles as the fallback.
        reducers = {
            AggregationType.COUNT: len,
            AggregationType.SUM: sum,
            AggregationType.AVERAGE: np.mean,
            AggregationType.MIN: min,
            AggregationType.MAX: max,
            AggregationType.MEDIAN: np.median,
        }
        reduce_fn = reducers.get(aggregation, len)

        points = []
        for bucket_start, records in buckets.items():
            bucket_values = [r[value_field] for r in records if value_field in r]
            if not bucket_values:
                continue
            points.append(DataPoint(
                timestamp=bucket_start,
                value=reduce_fn(bucket_values),
                metadata={'count': len(records)}
            ))

        # Chronological order for the resulting series.
        points.sort(key=lambda p: p.timestamp)

        return TimeSeries(
            series_id=str(uuid4()),
            name=f"{value_field}_{aggregation.value}",
            data_points=points,
            granularity=granularity,
            start_time=points[0].timestamp if points else datetime.now(),
            end_time=points[-1].timestamp if points else datetime.now(),
            unit=self._get_unit_for_field(value_field)
        )

    def _group_by_time(
        self, 
        data: List[Dict[str, Any]], 
        time_field: str, 
        granularity: TimeGranularity
    ) -> Dict[datetime, List[Dict[str, Any]]]:
        """Group records into buckets keyed by their truncated timestamp."""
        buckets = defaultdict(list)
        for record in data:
            ts = record.get(time_field)
            # Records with a missing or non-datetime timestamp are skipped.
            if isinstance(ts, datetime):
                buckets[self._truncate_timestamp(ts, granularity)].append(record)
        return buckets

    def _truncate_timestamp(self, ts: datetime, granularity: TimeGranularity) -> datetime:
        """Round *ts* down to the start of its bucket for the given granularity."""
        if granularity == TimeGranularity.MINUTE:
            return ts.replace(second=0, microsecond=0)
        if granularity == TimeGranularity.HOUR:
            return ts.replace(minute=0, second=0, microsecond=0)
        if granularity == TimeGranularity.DAY:
            return ts.replace(hour=0, minute=0, second=0, microsecond=0)
        if granularity == TimeGranularity.WEEK:
            # Snap to the preceding Monday at midnight.
            monday = ts - timedelta(days=ts.weekday())
            return monday.replace(hour=0, minute=0, second=0, microsecond=0)
        if granularity == TimeGranularity.MONTH:
            return ts.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
        if granularity == TimeGranularity.YEAR:
            return ts.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
        # Unknown granularity: leave the timestamp as-is (one bucket per instant).
        return ts

    def _get_unit_for_field(self, field: str) -> Optional[str]:
        """Return the display unit for a known metric field, else None."""
        return {
            'total_count': '人',
            'person_count': '人',
            'processing_time': '秒',
            'confidence': '%',
            'temperature': '°C',
            'humidity': '%'
        }.get(field)

    def detect_anomalies(self, series: TimeSeries, threshold: float = 2.0) -> List[Dict[str, Any]]:
        """Flag points whose z-score exceeds *threshold*; needs at least 10 points."""
        points = series.data_points
        if len(points) < 10:
            return []

        sample = [p.value for p in points]
        center = np.mean(sample)
        spread = np.std(sample)

        flagged = []
        for idx, point in enumerate(points):
            deviation = abs(point.value - center) / spread if spread > 0 else 0
            if deviation > threshold:
                flagged.append({
                    'index': idx,
                    'timestamp': point.timestamp,
                    'value': point.value,
                    'z_score': deviation,
                    'type': 'high' if point.value > center else 'low'
                })

        return flagged


class DataAnalysisService:
    """Facade that collects raw events and exposes statistical query APIs.

    Wires a DataCollector (ingest) to a StatisticalAnalyzer (aggregation) and
    runs a periodic background task that evicts expired cache entries.
    """
    def __init__(self):
        self.data_collector = DataCollector()
        self.statistical_analyzer = StatisticalAnalyzer()

        # Result caches keyed by chart/report id; pruned by _periodic_analysis.
        self.chart_cache: Dict[str, "ChartData"] = {}
        self.report_cache: Dict[str, "Report"] = {}

        # Background-task state.
        self._running = False
        self._analysis_task: Optional[asyncio.Task] = None
        self.analysis_interval = 300.0  # run maintenance every 5 minutes

    async def start_service(self):
        """Start the background analysis loop (no-op if already running)."""
        if self._running:
            return

        self._running = True
        self._analysis_task = asyncio.create_task(self._analysis_loop())
        logger.info("Data analysis service started")

    async def stop_service(self):
        """Stop the background analysis loop and wait for it to finish."""
        self._running = False
        if self._analysis_task:
            self._analysis_task.cancel()
            try:
                await self._analysis_task
            except asyncio.CancelledError:
                pass
        logger.info("Data analysis service stopped")

    async def _analysis_loop(self):
        """Run periodic maintenance until the service is stopped."""
        while self._running:
            try:
                await self._periodic_analysis()
                await asyncio.sleep(self.analysis_interval)
            except asyncio.CancelledError:
                break
            except Exception as e:
                # Keep the loop alive on unexpected errors; retry after a minute.
                logger.error(f"Error in analysis loop: {e}")
                await asyncio.sleep(60.0)

    async def _periodic_analysis(self):
        """Evict chart cache entries older than 1 hour and reports older than 1 day."""
        current_time = datetime.now()

        expired_charts = [
            chart_id for chart_id, chart in self.chart_cache.items()
            if (current_time - chart.updated_at).total_seconds() > 3600
        ]
        for chart_id in expired_charts:
            del self.chart_cache[chart_id]

        expired_reports = [
            report_id for report_id, report in self.report_cache.items()
            if (current_time - report.generated_at).total_seconds() > 86400
        ]
        for report_id in expired_reports:
            del self.report_cache[report_id]

    # --- data ingestion (delegates to the collector) ---------------------
    def add_person_detection_data(self, data: "PersonDetectionResult"):
        """Forward a person-detection result to the collector."""
        self.data_collector.add_person_detection_data(data)

    def add_pose_analysis_data(self, data: "PoseAnalysisResult"):
        """Forward a pose-analysis result to the collector."""
        self.data_collector.add_pose_analysis_data(data)

    def add_behavior_alert_data(self, data: "BehaviorAlert"):
        """Forward a behavior alert to the collector."""
        self.data_collector.add_behavior_alert_data(data)

    def add_performance_metric(self, metric: "PerformanceMetrics"):
        """Forward a performance metric sample to the collector."""
        self.data_collector.add_performance_metric(metric)

    # --- query helpers ----------------------------------------------------
    def _filter_detections(
        self,
        start_time: datetime,
        end_time: Optional[datetime] = None,
        camera_id: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """Return buffered detection records in [start_time, end_time] for camera_id.

        An end_time of None means "no upper bound"; a camera_id of None
        matches all cameras.
        """
        return [
            d for d in self.data_collector.person_detection_data
            if d['timestamp'] >= start_time
            and (end_time is None or d['timestamp'] <= end_time)
            and (camera_id is None or d['camera_id'] == camera_id)
        ]

    # --- statistical queries ----------------------------------------------
    async def get_person_count_statistics(
        self, 
        start_time: datetime,
        end_time: datetime,
        camera_id: Optional[str] = None,
        granularity: "Optional[TimeGranularity]" = None
    ) -> "TimeSeries":
        """Average person count per time bucket over [start_time, end_time].

        granularity defaults to hourly buckets (None -> TimeGranularity.HOUR;
        the lazy default avoids evaluating the enum at definition time and is
        backward compatible with the previous HOUR default).
        """
        if granularity is None:
            granularity = TimeGranularity.HOUR

        filtered_data = self._filter_detections(start_time, end_time, camera_id)

        return self.statistical_analyzer.create_time_series(
            filtered_data, 
            'total_count', 
            granularity=granularity,
            aggregation=AggregationType.AVERAGE
        )

    async def get_statistical_summary(
        self, 
        start_time: datetime,
        end_time: datetime,
        metric: str = 'total_count',
        camera_id: Optional[str] = None
    ) -> "StatisticalSummary":
        """Descriptive statistics for a detection metric over a time range.

        Supported metrics: 'total_count' and 'processing_time' (both read from
        the person-detection buffer); any other metric yields an empty summary.
        """
        # Both supported metrics live on the same records, so one filter pass
        # replaces the previously duplicated comprehensions.
        if metric in ('total_count', 'processing_time'):
            values = [
                d[metric]
                for d in self._filter_detections(start_time, end_time, camera_id)
            ]
        else:
            values = []

        return self.statistical_analyzer.calculate_summary(values)

    # --- real-time queries ------------------------------------------------
    async def get_real_time_stats(self) -> Dict[str, Any]:
        """Return the collector's cached rolling statistics."""
        return self.data_collector.get_real_time_stats()

    async def get_current_person_count(self, camera_id: Optional[str] = None) -> int:
        """Return the most recent detection count within the last 5 minutes (0 if none)."""
        recent_time = datetime.now() - timedelta(minutes=5)
        recent_data = self._filter_detections(recent_time, camera_id=camera_id)

        if not recent_data:
            return 0

        # Use the newest record as the current count.
        latest_data = max(recent_data, key=lambda x: x['timestamp'])
        return latest_data['total_count']

    async def get_alert_summary(self, hours: int = 24) -> Dict[str, Any]:
        """Summarise behavior alerts over the last *hours* hours."""
        current_time = datetime.now()
        start_time = current_time - timedelta(hours=hours)

        recent_alerts = [
            a for a in self.data_collector.behavior_alert_data
            if a['timestamp'] >= start_time
        ]

        # Tally by severity and behavior type, counting unresolved alerts.
        severity_counts = defaultdict(int)
        behavior_counts = defaultdict(int)
        unresolved_count = 0

        for alert in recent_alerts:
            severity_counts[alert['severity']] += 1
            behavior_counts[alert['behavior_type']] += 1
            if not alert['resolved']:
                unresolved_count += 1

        return {
            'total_alerts': len(recent_alerts),
            'unresolved_alerts': unresolved_count,
            'severity_distribution': dict(severity_counts),
            'behavior_distribution': dict(behavior_counts),
            'period_hours': hours
        }


# Global singleton service instance shared across the application.
data_analysis_service = DataAnalysisService()


# Module-level service lifecycle helpers
async def start_data_analysis_service():
    """Start the shared data analysis service (no-op if it is already running)."""
    await data_analysis_service.start_service()


async def stop_data_analysis_service():
    """Stop the shared data analysis service and await its background task."""
    await data_analysis_service.stop_service()


async def get_data_analysis_service() -> DataAnalysisService:
    """Return the module-level DataAnalysisService singleton."""
    return data_analysis_service