#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import mysql.connector
import json
import pickle
import base64
import numpy as np
from datetime import datetime, timedelta
from typing import Dict, List, Any, Optional, Union
import logging

import yaml
import os


class KylinDatabase:
    """MySQL persistence layer for the Kylin monitoring system.

    Stores training samples, serialized models, detection results,
    system configuration entries and model-performance metrics.

    Connection settings are resolved in priority order: explicit
    constructor arguments, then the ``database`` section of a
    ``config.yaml`` located next to this module, then built-in defaults.
    """

    def __init__(self, host=None, port=None, user=None, password=None, database=None):
        """Resolve connection settings; no connection is opened here.

        Any argument left as ``None`` falls back to config.yaml and then
        to the built-in defaults.
        """
        config_path = os.path.join(os.path.dirname(__file__), 'config.yaml')
        db_config = {}
        if os.path.exists(config_path):
            with open(config_path, 'r', encoding='utf-8') as f:
                config = yaml.safe_load(f)
            # safe_load() returns None for an empty file -- guard against it.
            db_config = (config or {}).get('database', {})

        # NOTE(review): hard-coded fallback credentials are a security smell;
        # prefer requiring them via the config file or environment variables.
        self.host = host if host is not None else db_config.get('host', 'localhost')
        self.port = port if port is not None else db_config.get('port', 3306)
        self.user = user if user is not None else db_config.get('user', 'root')
        self.password = password if password is not None else db_config.get('password', 'Huawei@678')
        self.database = database if database is not None else db_config.get('database', 'kylin_monitor')

        self.connection = None  # opened lazily by connect()
        self.setup_logging()

    def setup_logging(self):
        """Configure the module logger (basicConfig is a no-op if already configured)."""
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s'
        )
        self.logger = logging.getLogger(__name__)

    def connect(self) -> bool:
        """Open a connection to the configured MySQL database.

        Returns:
            True on success, False on failure (the error is logged).
        """
        try:
            self.connection = mysql.connector.connect(
                host=self.host,
                port=self.port,
                user=self.user,
                password=self.password,
                database=self.database,
                charset='utf8mb4',
                autocommit=True,  # every statement commits immediately
                auth_plugin='mysql_native_password'  # needed for some MySQL 8 setups
            )
            self.logger.info(f"成功连接到MySQL数据库: {self.database}")
            return True
        except mysql.connector.Error as e:
            self.logger.error(f"连接MySQL数据库失败: {e}")
            return False

    def disconnect(self):
        """Close the connection if it is open (safe to call repeatedly)."""
        if self.connection and self.connection.is_connected():
            self.connection.close()
            self.logger.info("已断开MySQL数据库连接")

    def _ensure_connection(self) -> bool:
        """Reconnect if needed; return True when a live connection exists."""
        if self.connection and self.connection.is_connected():
            return True
        return self.connect()

    def create_database(self) -> bool:
        """Create the database and all tables (idempotent).

        Connects WITHOUT selecting a schema so this works on a fresh
        server where the target database does not exist yet.

        Returns:
            True on success, False on failure (the error is logged).
        """
        conn = None
        cursor = None
        try:
            conn = mysql.connector.connect(
                host=self.host,
                port=self.port,
                user=self.user,
                password=self.password,
                charset='utf8mb4',
                auth_plugin='mysql_native_password'  # needed for some MySQL 8 setups
            )
            cursor = conn.cursor()

            # Identifiers cannot be bound as parameters; self.database comes
            # from trusted configuration, not from user input.
            cursor.execute(
                f"CREATE DATABASE IF NOT EXISTS {self.database} CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci")
            cursor.execute(f"USE {self.database}")

            # Raw monitoring samples used as model training input.
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS training_data (
                    id BIGINT AUTO_INCREMENT PRIMARY KEY,
                    timestamp DATETIME NOT NULL,
                    metrics JSON NOT NULL,
                    features JSON NOT NULL,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    INDEX idx_timestamp (timestamp),
                    INDEX idx_created_at (created_at)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
            """)

            # Serialized (pickled + base64) models, one row per version.
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS trained_models (
                    id BIGINT AUTO_INCREMENT PRIMARY KEY,
                    model_name VARCHAR(100) NOT NULL,
                    model_version VARCHAR(50) NOT NULL,
                    model_data LONGTEXT NOT NULL,
                    model_info JSON NOT NULL,
                    training_samples INT NOT NULL,
                    feature_dim INT NOT NULL,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    is_active BOOLEAN DEFAULT TRUE,
                    UNIQUE KEY unique_model_version (model_name, model_version),
                    INDEX idx_model_name (model_name),
                    INDEX idx_is_active (is_active),
                    INDEX idx_created_at (created_at)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
            """)

            # Per-detection anomaly output.
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS detection_results (
                    id BIGINT AUTO_INCREMENT PRIMARY KEY,
                    timestamp DATETIME NOT NULL,
                    metrics JSON NOT NULL,
                    anomalies JSON NOT NULL,
                    risk_level INT NOT NULL,
                    model_version VARCHAR(50),
                    detection_time_ms INT,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    INDEX idx_timestamp (timestamp),
                    INDEX idx_risk_level (risk_level),
                    INDEX idx_model_version (model_version),
                    INDEX idx_created_at (created_at)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
            """)

            # Key/value configuration store (JSON values).
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS system_config (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    config_key VARCHAR(100) NOT NULL UNIQUE,
                    config_value JSON NOT NULL,
                    description TEXT,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    INDEX idx_config_key (config_key)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
            """)

            # Evaluation metrics recorded per trained model version.
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS model_performance (
                    id BIGINT AUTO_INCREMENT PRIMARY KEY,
                    model_name VARCHAR(100) NOT NULL,
                    model_version VARCHAR(50) NOT NULL,
                    accuracy FLOAT,
                    precisions FLOAT,
                    recall FLOAT,
                    f1_score FLOAT,
                    training_time_seconds FLOAT,
                    inference_time_ms FLOAT,
                    test_samples INT,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    INDEX idx_model_name (model_name),
                    INDEX idx_model_version (model_version),
                    INDEX idx_created_at (created_at)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
            """)

            conn.commit()
            self.logger.info("数据库和表结构创建成功")
            return True

        except mysql.connector.Error as e:
            self.logger.error(f"创建数据库结构失败: {e}")
            return False
        finally:
            # Always release the bootstrap connection, even on failure.
            if cursor is not None:
                cursor.close()
            if conn is not None and conn.is_connected():
                conn.close()

    def save_training_data(self, metrics: Dict, features: Dict, timestamp: datetime = None) -> bool:
        """Insert one training sample.

        Args:
            metrics: raw metric values (stored as JSON).
            features: derived feature values (stored as JSON).
            timestamp: sample time; defaults to now.

        Returns:
            True on success, False on failure (the error is logged).
        """
        if not self._ensure_connection():
            return False

        if timestamp is None:
            timestamp = datetime.now()

        try:
            cursor = self.connection.cursor()
            try:
                cursor.execute("""
                    INSERT INTO training_data (timestamp, metrics, features)
                    VALUES (%s, %s, %s)
                """, (
                    timestamp,
                    json.dumps(metrics, ensure_ascii=False, default=str),
                    json.dumps(features, ensure_ascii=False, default=str)
                ))
            finally:
                cursor.close()  # close even when execute() raises
            return True

        except mysql.connector.Error as e:
            self.logger.error(f"保存训练数据失败: {e}")
            return False

    def get_training_data(self, hours: int = 24, limit: int = None) -> List[Dict]:
        """Return training rows newer than ``hours`` ago, newest first.

        Args:
            hours: look-back window in hours.
            limit: optional maximum number of rows.

        Returns:
            List of dicts with keys ``timestamp``, ``metrics`` and
            ``features`` (JSON columns decoded); empty list on error.
        """
        if not self._ensure_connection():
            return []

        try:
            cursor = self.connection.cursor(dictionary=True)
            try:
                query = """
                    SELECT timestamp, metrics, features
                    FROM training_data
                    WHERE timestamp >= %s
                    ORDER BY timestamp DESC
                """
                params = [datetime.now() - timedelta(hours=hours)]

                if limit:
                    # Bind LIMIT as a parameter instead of f-string
                    # interpolation, which was SQL-injection-prone.
                    query += " LIMIT %s"
                    params.append(int(limit))

                cursor.execute(query, params)
                rows = cursor.fetchall()
            finally:
                cursor.close()

            return [
                {
                    'timestamp': row['timestamp'],
                    'metrics': json.loads(row['metrics']),
                    'features': json.loads(row['features'])
                }
                for row in rows
            ]

        except mysql.connector.Error as e:
            self.logger.error(f"获取训练数据失败: {e}")
            return []

    def save_model(self, model_name: str, model_version: str, model: Any,
                   model_info: Dict, training_samples: int, feature_dim: int) -> bool:
        """Persist a trained model and deactivate prior versions of the same name.

        The model object is pickled and base64-encoded into a LONGTEXT column.

        NOTE(review): with autocommit enabled the deactivate + insert pair is
        not atomic -- a failed insert leaves every version inactive.

        Returns:
            True on success, False on failure (the error is logged).
        """
        if not self._ensure_connection():
            return False

        try:
            # Serialize: pickle -> base64 text so it fits the LONGTEXT column.
            model_data = base64.b64encode(pickle.dumps(model)).decode('utf-8')

            cursor = self.connection.cursor()
            try:
                # Deactivate older versions so only the new one is active.
                cursor.execute("""
                    UPDATE trained_models 
                    SET is_active = FALSE 
                    WHERE model_name = %s
                """, (model_name,))

                cursor.execute("""
                    INSERT INTO trained_models 
                    (model_name, model_version, model_data, model_info, training_samples, feature_dim)
                    VALUES (%s, %s, %s, %s, %s, %s)
                """, (
                    model_name,
                    model_version,
                    model_data,
                    json.dumps(model_info, ensure_ascii=False, default=str),
                    training_samples,
                    feature_dim
                ))
            finally:
                cursor.close()

            self.logger.info(f"模型 {model_name} v{model_version} 保存成功")
            return True

        except mysql.connector.Error as e:
            self.logger.error(f"保存模型失败: {e}")
            return False

    def load_model(self, model_name: str, model_version: str = None) -> Optional[Any]:
        """Load a model: a specific version, or the newest active one.

        SECURITY(review): pickle.loads() executes arbitrary code during
        deserialization -- only load models from a fully trusted database.

        Returns:
            The deserialized model object, or None if missing / on error.
        """
        if not self._ensure_connection():
            return None

        try:
            cursor = self.connection.cursor(dictionary=True)
            try:
                if model_version:
                    cursor.execute("""
                        SELECT model_data, model_info
                        FROM trained_models
                        WHERE model_name = %s AND model_version = %s
                    """, (model_name, model_version))
                else:
                    cursor.execute("""
                        SELECT model_data, model_info
                        FROM trained_models
                        WHERE model_name = %s AND is_active = TRUE
                        ORDER BY created_at DESC
                        LIMIT 1
                    """, (model_name,))
                result = cursor.fetchone()
            finally:
                cursor.close()

            if result is None:
                self.logger.warning(f"未找到模型 {model_name}")
                return None

            model = pickle.loads(base64.b64decode(result['model_data']))
            self.logger.info(f"成功加载模型 {model_name}")
            return model

        except mysql.connector.Error as e:
            self.logger.error(f"加载模型失败: {e}")
            return None

    def get_available_models(self) -> List[Dict]:
        """List all stored models, newest first (empty list on error)."""
        if not self._ensure_connection():
            return []

        try:
            cursor = self.connection.cursor(dictionary=True)
            try:
                cursor.execute("""
                    SELECT model_name, model_version, model_info, training_samples, 
                           feature_dim, created_at, is_active
                    FROM trained_models
                    ORDER BY created_at DESC
                """)
                rows = cursor.fetchall()
            finally:
                cursor.close()

            return [
                {
                    'model_name': row['model_name'],
                    'model_version': row['model_version'],
                    'model_info': json.loads(row['model_info']),
                    'training_samples': row['training_samples'],
                    'feature_dim': row['feature_dim'],
                    'created_at': row['created_at'],
                    'is_active': row['is_active']
                }
                for row in rows
            ]

        except mysql.connector.Error as e:
            self.logger.error(f"获取模型列表失败: {e}")
            return []

    def save_detection_result(self, metrics: Dict, anomalies: List[Dict],
                              risk_level: int, model_version: str = None,
                              detection_time_ms: int = None) -> bool:
        """Insert one detection result, timestamped with the current time.

        Returns:
            True on success, False on failure (the error is logged).
        """
        if not self._ensure_connection():
            return False

        try:
            cursor = self.connection.cursor()
            try:
                cursor.execute("""
                    INSERT INTO detection_results 
                    (timestamp, metrics, anomalies, risk_level, model_version, detection_time_ms)
                    VALUES (%s, %s, %s, %s, %s, %s)
                """, (
                    datetime.now(),
                    json.dumps(metrics, ensure_ascii=False, default=str),
                    json.dumps(anomalies, ensure_ascii=False, default=str),
                    risk_level,
                    model_version,
                    detection_time_ms
                ))
            finally:
                cursor.close()
            return True

        except mysql.connector.Error as e:
            self.logger.error(f"保存检测结果失败: {e}")
            return False

    def get_detection_history(self, hours: int = 24, limit: int = 100) -> List[Dict]:
        """Return detection results newer than ``hours`` ago, newest first.

        Returns:
            At most ``limit`` rows as dicts (JSON columns decoded);
            empty list on error.
        """
        if not self._ensure_connection():
            return []

        try:
            cursor = self.connection.cursor(dictionary=True)
            try:
                cursor.execute("""
                    SELECT timestamp, metrics, anomalies, risk_level, model_version, detection_time_ms
                    FROM detection_results
                    WHERE timestamp >= %s
                    ORDER BY timestamp DESC
                    LIMIT %s
                """, (datetime.now() - timedelta(hours=hours), limit))
                rows = cursor.fetchall()
            finally:
                cursor.close()

            return [
                {
                    'timestamp': row['timestamp'],
                    'metrics': json.loads(row['metrics']),
                    'anomalies': json.loads(row['anomalies']),
                    'risk_level': row['risk_level'],
                    'model_version': row['model_version'],
                    'detection_time_ms': row['detection_time_ms']
                }
                for row in rows
            ]

        except mysql.connector.Error as e:
            self.logger.error(f"获取检测历史失败: {e}")
            return []

    def save_system_config(self, config_key: str, config_value: Dict, description: str = None) -> bool:
        """Upsert one configuration entry (insert or overwrite on key clash).

        Returns:
            True on success, False on failure (the error is logged).
        """
        if not self._ensure_connection():
            return False

        try:
            cursor = self.connection.cursor()
            try:
                cursor.execute("""
                    INSERT INTO system_config (config_key, config_value, description)
                    VALUES (%s, %s, %s)
                    ON DUPLICATE KEY UPDATE
                    config_value = VALUES(config_value),
                    description = VALUES(description)
                """, (
                    config_key,
                    json.dumps(config_value, ensure_ascii=False, default=str),
                    description
                ))
            finally:
                cursor.close()
            return True

        except mysql.connector.Error as e:
            self.logger.error(f"保存系统配置失败: {e}")
            return False

    def get_system_config(self, config_key: str) -> Optional[Dict]:
        """Return the decoded JSON value for ``config_key``, or None if absent/on error."""
        if not self._ensure_connection():
            return None

        try:
            cursor = self.connection.cursor(dictionary=True)
            try:
                cursor.execute("""
                    SELECT config_value
                    FROM system_config
                    WHERE config_key = %s
                """, (config_key,))
                result = cursor.fetchone()
            finally:
                cursor.close()

            if result is None:
                return None
            return json.loads(result['config_value'])

        except mysql.connector.Error as e:
            self.logger.error(f"获取系统配置失败: {e}")
            return None

    def save_model_performance(self, model_name: str, model_version: str,
                               accuracy: float = None, precisions: float = None,
                               recall: float = None, f1_score: float = None,
                               training_time_seconds: float = None,
                               inference_time_ms: float = None, test_samples: int = None) -> bool:
        """Record evaluation metrics for one model version (any metric may be None).

        Returns:
            True on success, False on failure (the error is logged).
        """
        if not self._ensure_connection():
            return False

        try:
            cursor = self.connection.cursor()
            try:
                cursor.execute("""
                    INSERT INTO model_performance 
                    (model_name, model_version, accuracy, precisions, recall, f1_score,
                     training_time_seconds, inference_time_ms, test_samples)
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
                """, (
                    model_name, model_version, accuracy, precisions, recall, f1_score,
                    training_time_seconds, inference_time_ms, test_samples
                ))
            finally:
                cursor.close()
            return True

        except mysql.connector.Error as e:
            self.logger.error(f"保存模型性能失败: {e}")
            return False

    def cleanup_old_data(self, days: int = 30) -> bool:
        """Delete training data, detection results and performance rows
        created more than ``days`` days ago.

        Returns:
            True on success, False on failure (the error is logged).
        """
        if not self._ensure_connection():
            return False

        try:
            cutoff_date = datetime.now() - timedelta(days=days)

            cursor = self.connection.cursor()
            try:
                cursor.execute("""
                    DELETE FROM training_data 
                    WHERE created_at < %s
                """, (cutoff_date,))
                training_deleted = cursor.rowcount

                cursor.execute("""
                    DELETE FROM detection_results 
                    WHERE created_at < %s
                """, (cutoff_date,))
                detection_deleted = cursor.rowcount

                cursor.execute("""
                    DELETE FROM model_performance 
                    WHERE created_at < %s
                """, (cutoff_date,))
                performance_deleted = cursor.rowcount
            finally:
                cursor.close()

            self.logger.info(
                f"数据清理完成: 训练数据 {training_deleted} 条, 检测结果 {detection_deleted} 条, 性能数据 {performance_deleted} 条")
            return True

        except mysql.connector.Error as e:
            self.logger.error(f"数据清理失败: {e}")
            return False

    def get_database_stats(self) -> Dict:
        """Return row counts for the main tables plus the last hour's
        detection count; empty dict on error.
        """
        if not self._ensure_connection():
            return {}

        try:
            stats = {}

            cursor = self.connection.cursor(dictionary=True)
            try:
                cursor.execute("SELECT COUNT(*) as count FROM training_data")
                stats['training_data_count'] = cursor.fetchone()['count']

                cursor.execute("SELECT COUNT(*) as count FROM trained_models")
                stats['models_count'] = cursor.fetchone()['count']

                cursor.execute("SELECT COUNT(*) as count FROM trained_models WHERE is_active = TRUE")
                stats['active_models_count'] = cursor.fetchone()['count']

                cursor.execute("SELECT COUNT(*) as count FROM detection_results")
                stats['detection_results_count'] = cursor.fetchone()['count']

                # Activity indicator: detections in the last hour.
                cursor.execute("""
                    SELECT COUNT(*) as count 
                    FROM detection_results 
                    WHERE timestamp >= %s
                """, (datetime.now() - timedelta(hours=1),))
                stats['recent_detections_1h'] = cursor.fetchone()['count']
            finally:
                cursor.close()

            return stats

        except mysql.connector.Error as e:
            self.logger.error(f"获取数据库统计失败: {e}")
            return {}


def create_database_script():
    """Return the standalone SQL bootstrap script for kylin_monitor.

    The script creates the database, all five tables and the default
    configuration rows; it can be fed directly to the ``mysql`` CLI.
    """
    # Returned verbatim; callers typically write it out as a .sql file.
    return """
-- Kylin监控系统数据库初始化脚本

-- 创建数据库
CREATE DATABASE IF NOT EXISTS kylin_monitor CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
USE kylin_monitor;

-- 创建训练数据表
CREATE TABLE IF NOT EXISTS training_data (
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    timestamp DATETIME NOT NULL,
    metrics JSON NOT NULL,
    features JSON NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    INDEX idx_timestamp (timestamp),
    INDEX idx_created_at (created_at)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

-- 创建模型表
CREATE TABLE IF NOT EXISTS trained_models (
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    model_name VARCHAR(100) NOT NULL,
    model_version VARCHAR(50) NOT NULL,
    model_data LONGTEXT NOT NULL,
    model_info JSON NOT NULL,
    training_samples INT NOT NULL,
    feature_dim INT NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    is_active BOOLEAN DEFAULT TRUE,
    UNIQUE KEY unique_model_version (model_name, model_version),
    INDEX idx_model_name (model_name),
    INDEX idx_is_active (is_active),
    INDEX idx_created_at (created_at)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

-- 创建检测结果表
CREATE TABLE IF NOT EXISTS detection_results (
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    timestamp DATETIME NOT NULL,
    metrics JSON NOT NULL,
    anomalies JSON NOT NULL,
    risk_level INT NOT NULL,
    model_version VARCHAR(50),
    detection_time_ms INT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    INDEX idx_timestamp (timestamp),
    INDEX idx_risk_level (risk_level),
    INDEX idx_model_version (model_version),
    INDEX idx_created_at (created_at)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

-- 创建系统配置表
CREATE TABLE IF NOT EXISTS system_config (
    id INT AUTO_INCREMENT PRIMARY KEY,
    config_key VARCHAR(100) NOT NULL UNIQUE,
    config_value JSON NOT NULL,
    description TEXT,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    INDEX idx_config_key (config_key)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

-- 创建模型性能表
CREATE TABLE IF NOT EXISTS model_performance (
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    model_name VARCHAR(100) NOT NULL,
    model_version VARCHAR(50) NOT NULL,
    accuracy FLOAT,
    precisions FLOAT,
    recall FLOAT,
    f1_score FLOAT,
    training_time_seconds FLOAT,
    inference_time_ms FLOAT,
    test_samples INT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    INDEX idx_model_name (model_name),
    INDEX idx_model_version (model_version),
    INDEX idx_created_at (created_at)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

-- 插入默认配置
INSERT INTO system_config (config_key, config_value, description) VALUES
                    ('training_config', '{"duration_hours": 1, "min_samples": 720, "collection_interval": 5}', '训练配置'),
('detection_config', '{"risk_threshold": 7, "anomaly_threshold": 0.1}', '检测配置'),
('model_config', '{"auto_retrain": true, "retrain_interval_hours": 168}', '模型配置')
ON DUPLICATE KEY UPDATE config_value = VALUES(config_value);
"""


if __name__ == "__main__":
    # Smoke-test the database layer.
    db = KylinDatabase()

    # Build the schema BEFORE connecting: connect() selects self.database,
    # which does not exist on a fresh server until create_database() runs
    # (it connects without selecting a schema).  The original order made
    # first-time bootstrap impossible.
    if db.create_database():
        print("数据库结构创建成功")

        if db.connect():
            print("数据库连接成功")

            # Report current table counts as a basic sanity check.
            stats = db.get_database_stats()
            print(f"数据库统计: {stats}")
        else:
            print("数据库连接失败")
    else:
        print("数据库结构创建失败")

    db.disconnect()