import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from mysql.connector.pooling import MySQLConnectionPool
from mysql.connector import Error
from sklearn.preprocessing import StandardScaler
from tensorflow.keras.models import Sequential, load_model
from tensorflow.keras.layers import Dense, LSTM, Dropout
from tensorflow.keras.callbacks import Callback
class DatabaseManager:
    """Thin wrapper around a MySQL connection pool plus schema/import helpers.

    Every public method returns a ``(success, payload)`` tuple — ``payload``
    is a human-readable message (or a row list for reads) — so callers can
    surface failures without catching driver exceptions themselves.
    """

    def __init__(self, config=None):
        """Create the manager and eagerly build the connection pool.

        config: dict of mysql.connector pool/connection options. When None,
            falls back to the module-level DB_CONFIG. The fallback is resolved
            lazily at call time so the class definition does not depend on
            DB_CONFIG existing yet, and so a shared mutable dict is never
            baked in as a default argument.
        """
        self.config = config if config is not None else DB_CONFIG
        self.pool = None
        self.connect()

    def connect(self):
        """Create the MySQL connection pool; returns (success, message)."""
        try:
            self.pool = MySQLConnectionPool(**self.config)
            return True, "数据库连接池创建成功"
        except Error as e:
            return False, f"数据库连接失败: {str(e)}"

    def get_connection(self):
        """Borrow a connection from the pool; returns None on failure."""
        try:
            return self.pool.get_connection()
        except Error:
            # Pool exhausted or broken — callers treat None as failure.
            return None

    def execute_query(self, query, values=None):
        """Run a write statement (INSERT/UPDATE/DELETE) with commit/rollback.

        Returns (True, success message) or (False, error message).
        """
        conn = self.get_connection()
        if not conn:
            return False, "获取数据库连接失败"

        cursor = conn.cursor()
        try:
            cursor.execute(query, values)
            conn.commit()
            return True, "查询执行成功"
        except Error as e:
            conn.rollback()
            return False, f"查询执行失败: {str(e)}"
        finally:
            cursor.close()
            conn.close()  # returns the pooled connection, does not destroy it

    def execute_read_query(self, query, values=None):
        """Run a SELECT; returns (True, list of dict rows) or (False, error message)."""
        conn = self.get_connection()
        if not conn:
            return False, "获取数据库连接失败"

        cursor = conn.cursor(dictionary=True)
        try:
            cursor.execute(query, values)
            return True, cursor.fetchall()
        except Error as e:
            return False, f"查询执行失败: {str(e)}"
        finally:
            cursor.close()
            conn.close()

    def create_tables(self):
        """Create the engines / sensor_data / predictions tables if missing.

        Returns (all_succeeded, combined status message).
        """
        create_engine_table = """
        CREATE TABLE IF NOT EXISTS engines (
            id INT PRIMARY KEY AUTO_INCREMENT,
            engine_id VARCHAR(20) NOT NULL,
            subset VARCHAR(10) NOT NULL,
            max_cycle INT NOT NULL
        )
        """

        # 24 sensor columns (sensor_1 .. sensor_24), matching import_data.
        columns = ", ".join([f"sensor_{i} FLOAT" for i in range(1, 25)])
        create_sensor_data_table = f"""
        CREATE TABLE IF NOT EXISTS sensor_data (
            id INT PRIMARY KEY AUTO_INCREMENT,
            engine_id INT NOT NULL,
            cycle INT NOT NULL,
            {columns},
            FOREIGN KEY (engine_id) REFERENCES engines(id)
        )
        """

        create_predictions_table = """
        CREATE TABLE IF NOT EXISTS predictions (
            id INT PRIMARY KEY AUTO_INCREMENT,
            engine_id INT NOT NULL,
            cycle INT NOT NULL,
            predicted_rul FLOAT NOT NULL,
            actual_rul FLOAT,
            timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (engine_id) REFERENCES engines(id)
        )
        """

        success1, msg1 = self.execute_query(create_engine_table)
        success2, msg2 = self.execute_query(create_sensor_data_table)
        success3, msg3 = self.execute_query(create_predictions_table)

        return all([success1, success2, success3]), f"{msg1}; {msg2}; {msg3}"

    def import_data(self, data, subset, progress_callback=None):
        """Import a per-engine time-series DataFrame into engines + sensor_data.

        data: DataFrame with 'engine_id', 'cycle' and sensor_1..sensor_24
            columns (C-MAPSS-style — TODO confirm against caller).
        subset: label stored per engine row (e.g. the dataset subset name).
        progress_callback: optional callable receiving an int percentage 0-100.
        Returns (success, message).
        """
        # Defined before the try so the except handler can reference them even
        # when the failure happens before a connection is obtained (the
        # original code raised NameError on `conn` in that case).
        conn = None
        cursor = None
        try:
            # One row per engine: its max cycle count.
            engines = data[['engine_id', 'cycle']].groupby('engine_id').max().reset_index()
            engine_id_map = {}  # original engine_id -> database primary key

            total_engines = len(engines)

            conn = self.get_connection()
            if not conn:
                return False, "获取数据库连接失败"

            cursor = conn.cursor()

            # Phase 1: insert engine rows (first 50% of reported progress).
            for i, (_, row) in enumerate(engines.iterrows()):
                # Cast numpy scalars to native Python types for the driver.
                engine_id = int(row['engine_id'])
                max_cycle = int(row['cycle'])

                query = "INSERT INTO engines (engine_id, subset, max_cycle) VALUES (%s, %s, %s)"
                values = (engine_id, subset, max_cycle)

                try:
                    cursor.execute(query, values)
                    conn.commit()
                except Error as e:
                    conn.rollback()
                    cursor.close()
                    conn.close()
                    return False, f"插入发动机 {engine_id} 失败: {str(e)}"

                # lastrowid gives the AUTO_INCREMENT id without the extra
                # SELECT LAST_INSERT_ID() round trip the original made.
                engine_id_map[engine_id] = cursor.lastrowid

                if progress_callback and i % max(1, total_engines // 100) == 0:
                    progress_callback(int(i / total_engines * 50))

            # Phase 2: bulk-insert sensor rows in batches (remaining 50%).
            batch_size = 1000
            # Ceiling division — the original `// batch_size + 1` over-counted
            # when a length was an exact multiple, leaving progress below 100%.
            total_batches = max(1, sum(
                -(-len(data[data['engine_id'] == eid]) // batch_size)
                for eid in engine_id_map
            ))
            processed_batches = 0

            # Loop-invariant: build the INSERT statement once.
            insert_columns = "engine_id, cycle, " + ", ".join([f"sensor_{i}" for i in range(1, 25)])
            placeholders = ", ".join(["%s"] * 26)  # engine_id + cycle + 24 sensors
            insert_query = f"INSERT INTO sensor_data ({insert_columns}) VALUES ({placeholders})"

            for engine_id, db_id in engine_id_map.items():
                engine_data = data[data['engine_id'] == engine_id]
                total_records = len(engine_data)

                for batch_start in range(0, total_records, batch_size):
                    batch_data = engine_data.iloc[batch_start:batch_start + batch_size]
                    values_list = []

                    for _, row in batch_data.iterrows():
                        # Native int/float again (handles numpy.int64/float64).
                        values = [int(db_id), int(row['cycle'])]
                        for s in range(1, 25):
                            values.append(float(row[f'sensor_{s}']))
                        values_list.append(tuple(values))

                    try:
                        cursor.executemany(insert_query, values_list)
                        conn.commit()
                    except Error as e:
                        conn.rollback()
                        cursor.close()
                        conn.close()
                        return False, f"插入发动机 {engine_id} 数据失败: {str(e)}"

                    processed_batches += 1
                    if progress_callback:
                        progress = 50 + int(processed_batches / total_batches * 50)
                        progress_callback(min(progress, 100))

            cursor.close()
            conn.close()
            return True, f"成功导入 {len(engine_id_map)} 台发动机数据"

        except Exception as e:
            # conn is None when the failure preceded get_connection().
            if conn is not None and conn.is_connected():
                conn.rollback()
                if cursor is not None:
                    cursor.close()  # original leaked the cursor on this path
                conn.close()
            return False, f"导入数据时发生异常: {str(e)}"