"""
KidsBuddy 1.0 时序数据记录系统
详细记录打断、干扰、穿插行为的毫秒级时序数据
"""

import asyncio
import contextlib
import json
import sqlite3
import time
from dataclasses import asdict, dataclass
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional, Union

from loguru import logger


@dataclass
class TimestampedData:
    """基础时间戳数据结构"""
    timestamp: float           # 精确到毫秒的时间戳
    session_id: str           # 作业会话ID
    student_id: str           # 学生ID
    data_type: str            # 数据类型
    data_value: Any           # 数据值
    confidence: float         # 数据置信度
    source_device: str        # 数据来源设备


@dataclass
class InterruptionEvent:
    """打断事件数据结构"""
    event_id: str
    session_id: str
    start_timestamp: float
    end_timestamp: Optional[float]
    interruption_type: str    # 洗澡、吃饭、休息等
    duration: Optional[float]
    recovery_time: Optional[float]  # 恢复学习状态所需时间
    impact_level: str         # low, medium, high
    context_data: Dict[str, Any]


@dataclass
class TaskSwitchEvent:
    """任务切换事件数据结构"""
    event_id: str
    session_id: str
    timestamp: float
    from_task: str
    to_task: str
    switch_reason: str
    switch_duration: float
    efficiency_impact: float


class TimeSeriesRecorder:
    """Records study-session time-series data into a SQLite database.

    High-frequency data points (:class:`TimestampedData`) are buffered in
    memory and written in batches — either every ``_flush_interval`` seconds
    by a background task, or immediately once ``_buffer_size`` points are
    queued.  Interruption and task-switch events are written on arrival.
    """

    def __init__(self, db_path: str = "data/kidsbuddy_timeseries.db"):
        """Create the recorder and initialize the database schema.

        Args:
            db_path: Location of the SQLite file; missing parent
                directories are created.
        """
        self.db_path = Path(db_path)
        self.db_path.parent.mkdir(parents=True, exist_ok=True)

        # In-memory buffer for high-frequency data points.
        self._data_buffer: List[TimestampedData] = []
        self._buffer_size = 1000      # flush immediately once this many points queue up
        self._flush_interval = 10.0   # seconds between periodic background flushes

        # Recording state.
        self._recording = False
        self._flush_task: Optional[asyncio.Task] = None

        self._init_database()

    def _connect(self) -> sqlite3.Connection:
        """Open a fresh connection to the backing database.

        Callers must close it — sqlite3's connection context manager only
        manages the transaction, so wrap uses in ``contextlib.closing``.
        """
        return sqlite3.connect(self.db_path)

    @staticmethod
    def _rows_to_dicts(cursor: sqlite3.Cursor) -> List[Dict[str, Any]]:
        """Materialize all rows of an executed cursor as column->value dicts."""
        columns = [col[0] for col in cursor.description]
        return [dict(zip(columns, row)) for row in cursor.fetchall()]

    def _init_database(self):
        """Create tables and indexes if they do not exist yet."""
        try:
            with contextlib.closing(self._connect()) as conn:
                cursor = conn.cursor()

                # Generic time-series data points.
                cursor.execute('''
                    CREATE TABLE IF NOT EXISTS timestamped_data (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        timestamp REAL NOT NULL,
                        session_id TEXT NOT NULL,
                        student_id TEXT NOT NULL,
                        data_type TEXT NOT NULL,
                        data_value TEXT NOT NULL,
                        confidence REAL NOT NULL,
                        source_device TEXT NOT NULL,
                        created_at DATETIME DEFAULT CURRENT_TIMESTAMP
                    )
                ''')

                # Interruption events.
                cursor.execute('''
                    CREATE TABLE IF NOT EXISTS interruption_events (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        event_id TEXT UNIQUE NOT NULL,
                        session_id TEXT NOT NULL,
                        start_timestamp REAL NOT NULL,
                        end_timestamp REAL,
                        interruption_type TEXT NOT NULL,
                        duration REAL,
                        recovery_time REAL,
                        impact_level TEXT NOT NULL,
                        context_data TEXT,
                        created_at DATETIME DEFAULT CURRENT_TIMESTAMP
                    )
                ''')

                # Task-switch events.
                cursor.execute('''
                    CREATE TABLE IF NOT EXISTS task_switch_events (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        event_id TEXT UNIQUE NOT NULL,
                        session_id TEXT NOT NULL,
                        timestamp REAL NOT NULL,
                        from_task TEXT NOT NULL,
                        to_task TEXT NOT NULL,
                        switch_reason TEXT NOT NULL,
                        switch_duration REAL NOT NULL,
                        efficiency_impact REAL NOT NULL,
                        created_at DATETIME DEFAULT CURRENT_TIMESTAMP
                    )
                ''')

                # Indexes for the session/timestamp query paths used below.
                cursor.execute(
                    'CREATE INDEX IF NOT EXISTS idx_timestamp ON timestamped_data(timestamp)')
                cursor.execute(
                    'CREATE INDEX IF NOT EXISTS idx_session ON timestamped_data(session_id)')
                cursor.execute(
                    'CREATE INDEX IF NOT EXISTS idx_data_type ON timestamped_data(data_type)')
                cursor.execute(
                    'CREATE INDEX IF NOT EXISTS idx_interruption_session ON interruption_events(session_id)')
                cursor.execute(
                    'CREATE INDEX IF NOT EXISTS idx_task_switch_session ON task_switch_events(session_id)')

                conn.commit()
                logger.info("Database initialized successfully")

        except Exception as e:
            logger.error(f"Database initialization error: {e}")

    async def start_recording(self):
        """Start accepting data and launch the periodic flush task."""
        if self._recording:
            logger.warning("Recording already started")
            return

        self._recording = True
        self._flush_task = asyncio.create_task(self._periodic_flush())
        logger.info("Time series recording started")

    async def stop_recording(self):
        """Stop recording, cancel the flush task and drain the buffer."""
        if not self._recording:
            return

        self._recording = False

        if self._flush_task:
            self._flush_task.cancel()
            try:
                await self._flush_task
            except asyncio.CancelledError:
                pass
            self._flush_task = None  # drop the finished task reference

        # Final flush so buffered points are not lost on shutdown.
        await self._flush_buffer()
        logger.info("Time series recording stopped")

    async def record_data(self, data: TimestampedData):
        """Queue a data point; flushes immediately if the buffer is full.

        Silently ignored while recording is stopped.
        """
        if not self._recording:
            return

        self._data_buffer.append(data)

        # Flush right away when the buffer reaches capacity.
        if len(self._data_buffer) >= self._buffer_size:
            await self._flush_buffer()

    async def record_interruption_event(self, event: InterruptionEvent):
        """Persist (or replace, by ``event_id``) an interruption event."""
        try:
            with contextlib.closing(self._connect()) as conn:
                conn.execute('''
                    INSERT OR REPLACE INTO interruption_events 
                    (event_id, session_id, start_timestamp, end_timestamp, 
                     interruption_type, duration, recovery_time, impact_level, context_data)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                ''', (
                    event.event_id,
                    event.session_id,
                    event.start_timestamp,
                    event.end_timestamp,
                    event.interruption_type,
                    event.duration,
                    event.recovery_time,
                    event.impact_level,
                    json.dumps(event.context_data)
                ))
                conn.commit()
                logger.debug(f"Interruption event recorded: {event.event_id}")

        except Exception as e:
            logger.error(f"Failed to record interruption event: {e}")

    async def record_task_switch_event(self, event: TaskSwitchEvent):
        """Persist (or replace, by ``event_id``) a task-switch event."""
        try:
            with contextlib.closing(self._connect()) as conn:
                conn.execute('''
                    INSERT OR REPLACE INTO task_switch_events 
                    (event_id, session_id, timestamp, from_task, to_task, 
                     switch_reason, switch_duration, efficiency_impact)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ''', (
                    event.event_id,
                    event.session_id,
                    event.timestamp,
                    event.from_task,
                    event.to_task,
                    event.switch_reason,
                    event.switch_duration,
                    event.efficiency_impact
                ))
                conn.commit()
                logger.debug(f"Task switch event recorded: {event.event_id}")

        except Exception as e:
            logger.error(f"Failed to record task switch event: {e}")

    async def _periodic_flush(self):
        """Background loop: flush the buffer every ``_flush_interval`` seconds."""
        while self._recording:
            try:
                await asyncio.sleep(self._flush_interval)
                await self._flush_buffer()
            except asyncio.CancelledError:
                break
            except Exception as e:
                logger.error(f"Periodic flush error: {e}")

    async def _flush_buffer(self):
        """Write all buffered data points to the database in one batch.

        On failure the buffer is left intact so the next flush retries.
        """
        if not self._data_buffer:
            return

        try:
            # Serialize first so a bad payload fails before touching the DB.
            rows = [
                (
                    data.timestamp,
                    data.session_id,
                    data.student_id,
                    data.data_type,
                    json.dumps(data.data_value),
                    data.confidence,
                    data.source_device,
                )
                for data in self._data_buffer
            ]

            with contextlib.closing(self._connect()) as conn:
                conn.executemany('''
                    INSERT INTO timestamped_data 
                    (timestamp, session_id, student_id, data_type, data_value, confidence, source_device)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                ''', rows)
                conn.commit()

            logger.debug(
                f"Flushed {len(rows)} data points to database")

            # Clear only after a successful commit.
            self._data_buffer.clear()

        except Exception as e:
            logger.error(f"Failed to flush buffer: {e}")

    def query_data_by_session(self, session_id: str, data_type: Optional[str] = None) -> List[Dict[str, Any]]:
        """Return a session's data points ordered by timestamp.

        Args:
            session_id: Session to query.
            data_type: Optional filter on the ``data_type`` column.

        Returns:
            One dict per row with ``data_value`` JSON-decoded where
            possible; ``[]`` on error.
        """
        try:
            with contextlib.closing(self._connect()) as conn:
                cursor = conn.cursor()

                sql = 'SELECT * FROM timestamped_data WHERE session_id = ?'
                params: List[Any] = [session_id]
                if data_type:
                    sql += ' AND data_type = ?'
                    params.append(data_type)
                sql += ' ORDER BY timestamp'
                cursor.execute(sql, params)

                results = self._rows_to_dicts(cursor)
                for data in results:
                    try:
                        data['data_value'] = json.loads(data['data_value'])
                    except (json.JSONDecodeError, TypeError):
                        pass  # keep the raw stored value if it is not JSON

                return results

        except Exception as e:
            logger.error(f"Failed to query data: {e}")
            return []

    def query_interruption_events(self, session_id: str) -> List[Dict[str, Any]]:
        """Return a session's interruption events ordered by start time.

        ``context_data`` is JSON-decoded, falling back to ``{}`` when the
        stored value is missing or malformed.  Returns ``[]`` on error.
        """
        try:
            with contextlib.closing(self._connect()) as conn:
                cursor = conn.cursor()
                cursor.execute('''
                    SELECT * FROM interruption_events 
                    WHERE session_id = ?
                    ORDER BY start_timestamp
                ''', (session_id,))

                results = self._rows_to_dicts(cursor)
                for data in results:
                    try:
                        data['context_data'] = json.loads(data['context_data'])
                    except (json.JSONDecodeError, TypeError):
                        data['context_data'] = {}

                return results

        except Exception as e:
            logger.error(f"Failed to query interruption events: {e}")
            return []

    def query_task_switch_events(self, session_id: str) -> List[Dict[str, Any]]:
        """Return a session's task-switch events ordered by timestamp.

        Returns ``[]`` on error.
        """
        try:
            with contextlib.closing(self._connect()) as conn:
                cursor = conn.cursor()
                cursor.execute('''
                    SELECT * FROM task_switch_events 
                    WHERE session_id = ?
                    ORDER BY timestamp
                ''', (session_id,))

                return self._rows_to_dicts(cursor)

        except Exception as e:
            logger.error(f"Failed to query task switch events: {e}")
            return []

    def get_session_statistics(self, session_id: str) -> Dict[str, Any]:
        """Aggregate per-session statistics.

        Durations are in the unit of the stored timestamps (presumably
        seconds, matching ``time.time()`` used elsewhere in this module);
        ``interruption_rate`` is interruptions per hour under that
        assumption.  Returns ``{}`` when the session has no data points
        or on error.
        """
        try:
            data_points = self.query_data_by_session(session_id)
            interruption_events = self.query_interruption_events(session_id)
            task_switch_events = self.query_task_switch_events(session_id)

            if not data_points:
                return {}

            # Session span from first to last recorded data point.
            start_time = min(data['timestamp'] for data in data_points)
            end_time = max(data['timestamp'] for data in data_points)
            session_duration = end_time - start_time

            # Interruption totals; open-ended events store duration=None.
            total_interruptions = len(interruption_events)
            total_interruption_time = sum(
                event.get('duration', 0) or 0 for event in interruption_events
            )

            # Task-switch totals.
            total_task_switches = len(task_switch_events)
            avg_switch_duration = (
                sum(event['switch_duration']
                    for event in task_switch_events) / total_task_switches
                if total_task_switches > 0 else 0
            )

            # Count data points per data type.
            data_type_counts: Dict[str, int] = {}
            for data in data_points:
                data_type = data['data_type']
                data_type_counts[data_type] = data_type_counts.get(
                    data_type, 0) + 1

            return {
                "session_id": session_id,
                "session_duration": session_duration,
                "total_data_points": len(data_points),
                "data_type_counts": data_type_counts,
                "total_interruptions": total_interruptions,
                "total_interruption_time": total_interruption_time,
                "interruption_rate": total_interruptions / (session_duration / 3600) if session_duration > 0 else 0,
                "total_task_switches": total_task_switches,
                "avg_switch_duration": avg_switch_duration,
                "effective_study_time": session_duration - total_interruption_time,
                "study_efficiency": (session_duration - total_interruption_time) / session_duration if session_duration > 0 else 0
            }

        except Exception as e:
            logger.error(f"Failed to get session statistics: {e}")
            return {}

    def cleanup_old_data(self, days_to_keep: int = 30):
        """Delete rows older than ``days_to_keep`` days from all tables."""
        try:
            cutoff_timestamp = time.time() - (days_to_keep * 24 * 3600)

            with contextlib.closing(self._connect()) as conn:
                cursor = conn.cursor()

                cursor.execute(
                    'DELETE FROM timestamped_data WHERE timestamp < ?', (cutoff_timestamp,))
                deleted_data = cursor.rowcount

                cursor.execute(
                    'DELETE FROM interruption_events WHERE start_timestamp < ?', (cutoff_timestamp,))
                deleted_interruptions = cursor.rowcount

                cursor.execute(
                    'DELETE FROM task_switch_events WHERE timestamp < ?', (cutoff_timestamp,))
                deleted_switches = cursor.rowcount

                conn.commit()

                logger.info(f"Cleaned up old data: {deleted_data} data points, "
                            f"{deleted_interruptions} interruptions, {deleted_switches} task switches")

        except Exception as e:
            logger.error(f"Failed to cleanup old data: {e}")
