#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
PostgreSQL性能测试模块
提供数据库性能测试功能，帮助用户评估数据库配置性能
"""

import os
import time
import psycopg2
import random
import string
import threading
import concurrent.futures
import datetime
import statistics
from typing import Dict, List, Tuple, Optional, Any, Union, Callable
from dataclasses import dataclass

from rich.console import Console
from rich.table import Table
from rich.progress import Progress, TaskID
from rich.panel import Panel
from rich.box import ROUNDED

from ..core import config
from ..core.ui_utils import console, STYLES, SYMBOLS, print_info, print_error, print_warning, print_success, get_input, confirm, ProgressManager
from ..core.utils import logger

@dataclass
class TestResult:
    """Summary of one performance-test run for a single operation type."""
    operation: str        # label such as "INSERT", "SELECT", "UPDATE", "INDEX"
    total_time: float     # wall-clock duration of the whole test, in seconds
    avg_time: float       # mean per-operation (or per-batch) latency, seconds
    min_time: float       # fastest timed operation, seconds
    max_time: float       # slowest timed operation, seconds
    ops_per_second: float # successful operations per second (denominator varies by test)
    success_count: int    # operations that completed successfully
    error_count: int      # operations that failed
    

class PostgresqlPerformanceTester:
    """PostgreSQL数据库性能测试类"""
    
    def __init__(self):
        """初始化性能测试器"""
        self.conn = None
        self.conn_params = {}
        self.test_schema = "perf_test"
        self.test_table = "performance_test"
        self.report_data = {}
        
    def connect(self, host: str = "localhost", port: int = 5432, 
                dbname: str = "postgres", user: str = "postgres", 
                password: Optional[str] = None) -> bool:
        """连接到PostgreSQL数据库"""
        try:
            # 保存连接参数用于重连
            self.conn_params = {
                'host': host,
                'port': port,
                'dbname': dbname,
                'user': user,
                'password': password
            }
            
            self.conn = psycopg2.connect(
                host=host,
                port=port,
                dbname=dbname,
                user=user,
                password=password
            )
            self.conn.set_session(autocommit=True)
            
            # 获取PostgreSQL版本信息
            with self.conn.cursor() as cursor:
                cursor.execute("SELECT version()")
                self.version_info = cursor.fetchone()[0]
                
            return True
        except Exception as e:
            print_error(f"连接数据库失败: {e}")
            return False
            
    def _execute_query(self, query: str, params: tuple = None) -> bool:
        """执行无返回值的SQL查询"""
        try:
            if not self.conn or self.conn.closed:
                if not self.connect(**self.conn_params):
                    return False
                
            with self.conn.cursor() as cursor:
                cursor.execute(query, params)
            return True
        except Exception as e:
            logger.error(f"执行查询时出错: {e}")
            return False
            
    def _execute_query_with_result(self, query: str, params: tuple = None) -> List[tuple]:
        """执行有返回值的SQL查询"""
        try:
            if not self.conn or self.conn.closed:
                if not self.connect(**self.conn_params):
                    return []
                
            with self.conn.cursor() as cursor:
                cursor.execute(query, params)
                return cursor.fetchall()
        except Exception as e:
            logger.error(f"执行查询时出错: {e}")
            return []
    
    def _generate_random_string(self, length: int = 10) -> str:
        """生成随机字符串，用于测试数据"""
        return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(length))
    
    def _generate_random_number(self, min_val: int = 1, max_val: int = 1000) -> int:
        """生成随机数字，用于测试数据"""
        return random.randint(min_val, max_val)
        
    def setup_test_environment(self) -> bool:
        """创建测试模式和测试表"""
        try:
            print_info(f"创建测试环境...")
            
            # 创建专用的测试模式
            create_schema_query = f"CREATE SCHEMA IF NOT EXISTS {self.test_schema}"
            if not self._execute_query(create_schema_query):
                return False
                
            # 创建测试表
            create_table_query = f"""
            CREATE TABLE IF NOT EXISTS {self.test_schema}.{self.test_table} (
                id SERIAL PRIMARY KEY,
                text_data VARCHAR(255),
                numeric_data INTEGER,
                random_data TEXT,
                created_at TIMESTAMP DEFAULT NOW()
            )
            """
            if not self._execute_query(create_table_query):
                return False
                
            print_success(f"测试环境创建成功")
            return True
        except Exception as e:
            print_error(f"创建测试环境时出错: {e}")
            return False
            
    def cleanup_test_environment(self) -> bool:
        """清理测试环境，删除临时表和模式"""
        try:
            print_info(f"清理测试环境...")
            
            # 删除测试表和模式
            drop_schema_query = f"DROP SCHEMA IF EXISTS {self.test_schema} CASCADE"
            if not self._execute_query(drop_schema_query):
                return False
                
            print_success(f"测试环境清理完成，所有测试数据已移除")
            return True
        except Exception as e:
            print_error(f"清理测试环境时出错: {e}")
            return False
            
    def run_insert_test(self, records: int = 1000, batch_size: int = 100) -> TestResult:
        """执行插入测试"""
        print_info(f"开始插入性能测试: {records} 条记录, 批量大小: {batch_size}")
        
        times = []
        total_start = time.time()
        success_count = 0
        error_count = 0
        
        with ProgressManager(total=records, description="执行插入测试") as progress:
            for i in range(0, records, batch_size):
                # 计算当前批次的实际大小
                current_batch_size = min(batch_size, records - i)
                
                # 准备批量插入语句
                values_placeholders = []
                values_data = []
                
                for j in range(current_batch_size):
                    text_data = self._generate_random_string(20)
                    numeric_data = self._generate_random_number()
                    random_data = self._generate_random_string(50)
                    
                    values_placeholders.append(f"(%s, %s, %s)")
                    values_data.extend([text_data, numeric_data, random_data])
                
                query = f"""
                INSERT INTO {self.test_schema}.{self.test_table} 
                (text_data, numeric_data, random_data)
                VALUES {', '.join(values_placeholders)}
                """
                
                # 执行批量插入
                start_time = time.time()
                success = self._execute_query(query, tuple(values_data))
                end_time = time.time()
                
                if success:
                    times.append(end_time - start_time)
                    success_count += current_batch_size
                else:
                    error_count += current_batch_size
                
                # 更新进度
                progress.update(current_batch_size)
        
        total_time = time.time() - total_start
        
        # 计算统计信息
        if times:
            avg_time = sum(times) / len(times)
            min_time = min(times)
            max_time = max(times)
            ops_per_second = success_count / total_time
        else:
            avg_time = min_time = max_time = ops_per_second = 0
        
        result = TestResult(
            operation="INSERT",
            total_time=total_time,
            avg_time=avg_time,
            min_time=min_time,
            max_time=max_time,
            ops_per_second=ops_per_second,
            success_count=success_count,
            error_count=error_count
        )
        
        self.report_data["insert_test"] = result
        
        print_success(f"插入测试完成，成功: {success_count}, 失败: {error_count}, 每秒操作数: {ops_per_second:.2f}")
        return result 

    def run_select_test(self, iterations: int = 1000) -> TestResult:
        """执行查询测试"""
        print_info(f"开始查询性能测试: {iterations} 次查询")
        
        # 先获取所有ID，用于随机查询
        id_query = f"""
        SELECT id FROM {self.test_schema}.{self.test_table}
        """
        ids_result = self._execute_query_with_result(id_query)
        
        if not ids_result:
            print_error("未找到测试数据，请先运行插入测试")
            return TestResult(
                operation="SELECT",
                total_time=0,
                avg_time=0,
                min_time=0,
                max_time=0,
                ops_per_second=0,
                success_count=0,
                error_count=iterations
            )
        
        # 提取ID列表
        ids = [row[0] for row in ids_result]
        
        times = []
        total_start = time.time()
        success_count = 0
        error_count = 0
        
        with ProgressManager(total=iterations, description="执行查询测试") as progress:
            for i in range(iterations):
                # 随机选择一个ID
                random_id = random.choice(ids)
                
                # 执行查询
                query = f"""
                SELECT * FROM {self.test_schema}.{self.test_table}
                WHERE id = %s
                """
                
                start_time = time.time()
                result = self._execute_query_with_result(query, (random_id,))
                end_time = time.time()
                
                if result:
                    times.append(end_time - start_time)
                    success_count += 1
                else:
                    error_count += 1
                
                # 更新进度
                progress.update()
        
        total_time = time.time() - total_start
        
        # 计算统计信息
        if times:
            avg_time = sum(times) / len(times)
            min_time = min(times)
            max_time = max(times)
            ops_per_second = success_count / total_time
        else:
            avg_time = min_time = max_time = ops_per_second = 0
        
        result = TestResult(
            operation="SELECT",
            total_time=total_time,
            avg_time=avg_time,
            min_time=min_time,
            max_time=max_time,
            ops_per_second=ops_per_second,
            success_count=success_count,
            error_count=error_count
        )
        
        self.report_data["select_test"] = result
        
        print_success(f"查询测试完成，成功: {success_count}, 失败: {error_count}, 每秒操作数: {ops_per_second:.2f}")
        return result
    
    def run_update_test(self, iterations: int = 1000) -> TestResult:
        """执行更新测试"""
        print_info(f"开始更新性能测试: {iterations} 次更新")
        
        # 先获取所有ID，用于随机更新
        id_query = f"""
        SELECT id FROM {self.test_schema}.{self.test_table}
        """
        ids_result = self._execute_query_with_result(id_query)
        
        if not ids_result:
            print_error("未找到测试数据，请先运行插入测试")
            return TestResult(
                operation="UPDATE",
                total_time=0,
                avg_time=0,
                min_time=0,
                max_time=0,
                ops_per_second=0,
                success_count=0,
                error_count=iterations
            )
        
        # 提取ID列表
        ids = [row[0] for row in ids_result]
        
        times = []
        total_start = time.time()
        success_count = 0
        error_count = 0
        
        with ProgressManager(total=iterations, description="执行更新测试") as progress:
            for i in range(iterations):
                # 随机选择一个ID
                random_id = random.choice(ids)
                new_text = self._generate_random_string(20)
                new_number = self._generate_random_number()
                
                # 执行更新
                query = f"""
                UPDATE {self.test_schema}.{self.test_table}
                SET text_data = %s, numeric_data = %s
                WHERE id = %s
                """
                
                start_time = time.time()
                success = self._execute_query(query, (new_text, new_number, random_id))
                end_time = time.time()
                
                if success:
                    times.append(end_time - start_time)
                    success_count += 1
                else:
                    error_count += 1
                
                # 更新进度
                progress.update()
        
        total_time = time.time() - total_start
        
        # 计算统计信息
        if times:
            avg_time = sum(times) / len(times)
            min_time = min(times)
            max_time = max(times)
            ops_per_second = success_count / total_time
        else:
            avg_time = min_time = max_time = ops_per_second = 0
        
        result = TestResult(
            operation="UPDATE",
            total_time=total_time,
            avg_time=avg_time,
            min_time=min_time,
            max_time=max_time,
            ops_per_second=ops_per_second,
            success_count=success_count,
            error_count=error_count
        )
        
        self.report_data["update_test"] = result
        
        print_success(f"更新测试完成，成功: {success_count}, 失败: {error_count}, 每秒操作数: {ops_per_second:.2f}")
        return result
    
    def run_index_performance_test(self) -> TestResult:
        """Compare LIKE-query latency before and after adding an index.

        Runs 100 prefix LIKE queries on text_data without an index, creates
        idx_text_data on that column, repeats the same workload, then drops
        the index again. The returned TestResult describes the *indexed*
        run; the full before/after comparison (including index creation
        time and improvement ratio) is stored in
        ``self.report_data["index_test"]``.

        NOTE(review): whether a plain b-tree index accelerates LIKE 'ab%'
        depends on the database collation (text_pattern_ops or C collation
        needed) — confirm against the target server.
        """
        print_info("开始索引性能测试")

        # Phase 1: baseline query performance without the index.
        print_info("测试无索引时的查询性能...")

        # Warn when the table is small; timing differences would be noise.
        count_query = f"SELECT COUNT(*) FROM {self.test_schema}.{self.test_table}"
        count_result = self._execute_query_with_result(count_query)
        record_count = count_result[0][0] if count_result else 0
        
        if record_count < 1000:
            print_warning(f"测试表中只有 {record_count} 条记录，性能测试效果可能不明显")
            
        # Run 100 prefix-LIKE queries against the (still unindexed) column.
        iterations = 100
        times_before_index = []
        success_count_before = 0
        error_count_before = 0
        
        with ProgressManager(total=iterations, description="测试无索引查询") as progress:
            for i in range(iterations):
                # Random two-character prefix pattern, e.g. "aB%".
                search_term = self._generate_random_string(2) + '%'
                
                query = f"""
                SELECT * FROM {self.test_schema}.{self.test_table}
                WHERE text_data LIKE %s
                LIMIT 10
                """
                
                start_time = time.time()
                result = self._execute_query_with_result(query, (search_term,))
                end_time = time.time()
                
                # NOTE(review): _execute_query_with_result returns [] (never
                # None) on failure, so this check is always true and failed
                # queries are counted as successes.
                if result is not None:
                    times_before_index.append(end_time - start_time)
                    success_count_before += 1
                else:
                    error_count_before += 1
                
                progress.update()
        
        # Baseline statistics. Unlike the other tests, ops/sec here divides
        # by the summed query latencies rather than by wall-clock time.
        if times_before_index:
            avg_time_before = sum(times_before_index) / len(times_before_index)
            min_time_before = min(times_before_index)
            max_time_before = max(times_before_index)
            ops_per_second_before = success_count_before / sum(times_before_index)
        else:
            avg_time_before = min_time_before = max_time_before = ops_per_second_before = 0
            
        # Phase 2: create the index and time how long creation takes.
        print_info("创建索引...")
        create_index_query = f"""
        CREATE INDEX IF NOT EXISTS idx_text_data 
        ON {self.test_schema}.{self.test_table} (text_data)
        """
        
        create_index_start = time.time()
        create_index_success = self._execute_query(create_index_query)
        create_index_time = time.time() - create_index_start
        
        if not create_index_success:
            print_error("创建索引失败")
            # Abort with a single-error result; nothing was measured after.
            return TestResult(
                operation="INDEX",
                total_time=0,
                avg_time=0,
                min_time=0,
                max_time=0,
                ops_per_second=0,
                success_count=0,
                error_count=1
            )
            
        print_success(f"索引创建成功，耗时: {create_index_time:.3f} 秒")
        
        # Phase 3: repeat the identical workload with the index in place.
        times_after_index = []
        success_count_after = 0
        error_count_after = 0
        
        with ProgressManager(total=iterations, description="测试有索引查询") as progress:
            for i in range(iterations):
                # Same pattern shape as the baseline run for a fair comparison.
                search_term = self._generate_random_string(2) + '%'
                
                query = f"""
                SELECT * FROM {self.test_schema}.{self.test_table}
                WHERE text_data LIKE %s
                LIMIT 10
                """
                
                start_time = time.time()
                result = self._execute_query_with_result(query, (search_term,))
                end_time = time.time()
                
                # NOTE(review): same always-true check as in the baseline run.
                if result is not None:
                    times_after_index.append(end_time - start_time)
                    success_count_after += 1
                else:
                    error_count_after += 1
                
                progress.update()
        
        # Indexed-run statistics (same conventions as the baseline).
        if times_after_index:
            avg_time_after = sum(times_after_index) / len(times_after_index)
            min_time_after = min(times_after_index)
            max_time_after = max(times_after_index)
            ops_per_second_after = success_count_after / sum(times_after_index)
        else:
            avg_time_after = min_time_after = max_time_after = ops_per_second_after = 0
            
        # Relative speed-up in percent (guard against division by zero).
        if avg_time_before > 0:
            improvement_ratio = (avg_time_before - avg_time_after) / avg_time_before * 100
        else:
            improvement_ratio = 0
            
        # Phase 4: drop the index so later tests see the original table.
        print_info("清理测试索引...")
        drop_index_query = f"""
        DROP INDEX IF EXISTS {self.test_schema}.idx_text_data
        """
        self._execute_query(drop_index_query)
        
        # The returned TestResult reflects only the indexed run.
        result = TestResult(
            operation="INDEX",
            total_time=sum(times_after_index),
            avg_time=avg_time_after,
            min_time=min_time_after,
            max_time=max_time_after,
            ops_per_second=ops_per_second_after,
            success_count=success_count_after,
            error_count=error_count_after
        )
        
        # Persist the full before/after comparison for report generation.
        self.report_data["index_test"] = {
            "result": result,
            "before_index": {
                "avg_time": avg_time_before,
                "min_time": min_time_before,
                "max_time": max_time_before,
                "ops_per_second": ops_per_second_before
            },
            "after_index": {
                "avg_time": avg_time_after,
                "min_time": min_time_after,
                "max_time": max_time_after,
                "ops_per_second": ops_per_second_after
            },
            "improvement_ratio": improvement_ratio,
            "index_creation_time": create_index_time
        }
        
        print_success(f"索引性能测试完成")
        print_info(f"无索引时平均查询时间: {avg_time_before:.6f} 秒")
        print_info(f"有索引时平均查询时间: {avg_time_after:.6f} 秒")
        print_info(f"性能提升: {improvement_ratio:.2f}%")
        
        return result 

    def run_concurrent_test(self, num_threads: int = 10, operations_per_thread: int = 100) -> Dict[str, TestResult]:
        """执行并发测试"""
        print_info(f"开始并发性能测试: {num_threads} 个并发线程, 每线程 {operations_per_thread} 操作")
        
        # 检查PostgreSQL的max_connections设置
        max_conn_query = "SHOW max_connections"
        max_conn_result = self._execute_query_with_result(max_conn_query)
        max_connections = int(max_conn_result[0][0]) if max_conn_result else 100
        
        # 检查当前活跃连接数
        current_conn_query = "SELECT count(*) FROM pg_stat_activity"
        current_conn_result = self._execute_query_with_result(current_conn_query)
        current_connections = current_conn_result[0][0] if current_conn_result else 0
        
        # 计算可用连接数，保留一些给系统使用
        available_connections = max(0, max_connections - current_connections - 5)  # 保留5个连接给系统使用
        
        # 根据可用连接数调整线程数
        if num_threads > available_connections:
            original_threads = num_threads
            num_threads = max(1, min(available_connections, 20))  # 最多使用20个线程，至少使用1个
            print_warning(f"调整并发线程数从 {original_threads} 减少到 {num_threads}，以避免超出数据库最大连接数 ({max_connections})")
            
        # 如果连接数太少，显示警告并继续
        if num_threads < 5:
            print_warning(f"可用连接数较少，将使用 {num_threads} 个线程进行测试，测试结果可能不能完全反映并发性能")
        
        # 确保测试表中有足够的数据
        count_query = f"SELECT COUNT(*) FROM {self.test_schema}.{self.test_table}"
        count_result = self._execute_query_with_result(count_query)
        record_count = count_result[0][0] if count_result else 0
        
        if record_count < 100:
            print_warning(f"测试表中只有 {record_count} 条记录，建议先运行插入测试添加更多数据")
        
        # 获取ID列表用于测试
        id_query = f"SELECT id FROM {self.test_schema}.{self.test_table}"
        ids_result = self._execute_query_with_result(id_query)
        ids = [row[0] for row in ids_result] if ids_result else []
        
        if not ids:
            print_error("无法获取测试数据ID，请先运行插入测试")
            return {
                "select": TestResult(
                    operation="CONCURRENT_SELECT",
                    total_time=0, avg_time=0, min_time=0, max_time=0,
                    ops_per_second=0, success_count=0, error_count=num_threads * operations_per_thread
                ),
                "update": TestResult(
                    operation="CONCURRENT_UPDATE",
                    total_time=0, avg_time=0, min_time=0, max_time=0,
                    ops_per_second=0, success_count=0, error_count=num_threads * operations_per_thread
                ),
                "insert": TestResult(
                    operation="CONCURRENT_INSERT",
                    total_time=0, avg_time=0, min_time=0, max_time=0,
                    ops_per_second=0, success_count=0, error_count=num_threads * operations_per_thread
                )
            }
        
        # 准备用于每个线程的连接参数
        db_params = self.conn_params.copy()
        
        # 定义线程结果存储，使用线程安全的数据结构
        results_lock = threading.Lock()  # 添加锁，保护共享数据
        results_select = {"times": [], "success": 0, "error": 0}
        results_update = {"times": [], "success": 0, "error": 0}
        results_insert = {"times": [], "success": 0, "error": 0}
        
        # 使用列表进行线程同步，以确保正确的启动顺序
        threads_ready_count = [0]  # 使用列表，以便可以修改内部值
        threads_ready_lock = threading.Lock()
        threads_ready_event = threading.Event()
        start_event = threading.Event()
        ids_lock = threading.Lock()  # 用于保护共享的ID列表
        connection_errors = [0]  # 记录连接错误
        
        # 创建共享连接池
        connection_pool_lock = threading.Lock()
        connection_pool = []
        
        # 预先创建连接池
        print_info(f"正在预创建连接池...")
        for i in range(min(num_threads, 10)):  # 最多预创建10个连接
            try:
                conn = psycopg2.connect(**db_params)
                conn.set_session(autocommit=True)
                connection_pool.append(conn)
            except Exception as e:
                logger.warning(f"预创建连接失败: {e}")
                # 如果无法创建足够连接，调整线程数
                if i == 0:
                    print_error(f"无法创建数据库连接: {e}")
                    return {
                        "select": TestResult(
                            operation="CONCURRENT_SELECT",
                            total_time=0, avg_time=0, min_time=0, max_time=0,
                            ops_per_second=0, success_count=0, error_count=1
                        ),
                        "update": TestResult(
                            operation="CONCURRENT_UPDATE",
                            total_time=0, avg_time=0, min_time=0, max_time=0,
                            ops_per_second=0, success_count=0, error_count=1
                        ),
                        "insert": TestResult(
                            operation="CONCURRENT_INSERT",
                            total_time=0, avg_time=0, min_time=0, max_time=0,
                            ops_per_second=0, success_count=0, error_count=1
                        )
                    }
                break

        # 调整线程数为连接池大小
        if len(connection_pool) < num_threads and len(connection_pool) > 0:
            print_warning(f"只能创建 {len(connection_pool)} 个数据库连接，调整线程数为 {len(connection_pool)}")
            num_threads = len(connection_pool)
        
        def get_connection():
            """从连接池获取连接"""
            with connection_pool_lock:
                if connection_pool:
                    return connection_pool.pop()
                else:
                    # 如果连接池为空，尝试创建新连接
                    try:
                        conn = psycopg2.connect(**db_params)
                        conn.set_session(autocommit=True)
                        return conn
                    except Exception as e:
                        logger.error(f"创建新连接失败: {e}")
                        with results_lock:
                            connection_errors[0] += 1
                        return None
        
        def return_connection(conn):
            """将连接返回连接池"""
            if conn and not conn.closed:
                with connection_pool_lock:
                    connection_pool.append(conn)
        
        def worker_thread(thread_id, operations, shared_ids):
            """工作线程执行多种数据库操作"""
            # 从连接池获取连接
            conn = get_connection()
            if not conn:
                logger.error(f"线程 {thread_id} 无法获取数据库连接")
                with threads_ready_lock:
                    threads_ready_count[0] += 1
                    if threads_ready_count[0] >= num_threads:
                        threads_ready_event.set()
                return
            
            try:
                # 通知主线程这个工作线程已准备好
                with threads_ready_lock:
                    threads_ready_count[0] += 1
                    if threads_ready_count[0] >= num_threads:
                        threads_ready_event.set()
                
                # 等待开始信号
                start_event.wait()
                
                for i in range(operations):
                    # 随机选择操作类型: 60% 查询, 30% 更新, 10% 插入
                    op_type = random.choices(
                        ["select", "update", "insert"], 
                        weights=[0.6, 0.3, 0.1], 
                        k=1
                    )[0]
                    
                    # 线程安全地获取ID
                    with ids_lock:
                        if not shared_ids:
                            # 如果没有可用ID，跳转到插入操作
                            op_type = "insert"
                        else:
                            random_id = random.choice(shared_ids) if op_type != "insert" else None
                    
                    if op_type == "select" and random_id is not None:
                        # 执行查询
                        query = f"""
                        SELECT * FROM {self.test_schema}.{self.test_table}
                        WHERE id = %s
                        """
                        
                        start_time = time.time()
                        try:
                            with conn.cursor() as cursor:
                                cursor.execute(query, (random_id,))
                                result = cursor.fetchall()
                                
                            end_time = time.time()
                            with results_lock:
                                results_select["times"].append(end_time - start_time)
                                results_select["success"] += 1
                        except Exception as e:
                            logger.error(f"线程 {thread_id} 查询出错: {e}")
                            with results_lock:
                                results_select["error"] += 1
                    
                    elif op_type == "update" and random_id is not None:
                        # 执行更新
                        new_text = ''.join(random.choice(string.ascii_letters) for _ in range(20))
                        new_number = random.randint(1, 1000)
                        
                        query = f"""
                        UPDATE {self.test_schema}.{self.test_table}
                        SET text_data = %s, numeric_data = %s
                        WHERE id = %s
                        """
                        
                        start_time = time.time()
                        try:
                            with conn.cursor() as cursor:
                                cursor.execute(query, (new_text, new_number, random_id))
                                
                            end_time = time.time()
                            with results_lock:
                                results_update["times"].append(end_time - start_time)
                                results_update["success"] += 1
                        except Exception as e:
                            logger.error(f"线程 {thread_id} 更新出错: {e}")
                            with results_lock:
                                results_update["error"] += 1
                    
                    else:  # 插入
                        # 执行插入
                        text_data = ''.join(random.choice(string.ascii_letters) for _ in range(20))
                        numeric_data = random.randint(1, 1000)
                        random_data = ''.join(random.choice(string.ascii_letters) for _ in range(50))
                        
                        query = f"""
                        INSERT INTO {self.test_schema}.{self.test_table} 
                        (text_data, numeric_data, random_data)
                        VALUES (%s, %s, %s)
                        RETURNING id
                        """
                        
                        start_time = time.time()
                        try:
                            with conn.cursor() as cursor:
                                cursor.execute(query, (text_data, numeric_data, random_data))
                                new_id = cursor.fetchone()[0]
                                
                            end_time = time.time()
                            with results_lock:
                                results_insert["times"].append(end_time - start_time)
                                results_insert["success"] += 1
                            
                            # 线程安全地添加新ID到共享列表
                            if new_id:
                                with ids_lock:
                                    shared_ids.append(new_id)
                        except Exception as e:
                            logger.error(f"线程 {thread_id} 插入出错: {e}")
                            with results_lock:
                                results_insert["error"] += 1
            
            except Exception as e:
                logger.error(f"线程 {thread_id} 出错: {e}")
            finally:
                # 确保返回连接到连接池
                return_connection(conn)
        
        # 创建线程池
        threads = []
        start_time = time.time()
        
        print_info("正在启动工作线程...")
        
        # 启动所有线程
        with ProgressManager(total=num_threads, description="启动线程") as progress:
            for i in range(num_threads):
                thread = threading.Thread(
                    target=worker_thread,
                    args=(i, operations_per_thread, ids)
                )
                threads.append(thread)
                thread.start()
                progress.update()
        
        # 等待所有线程准备就绪
        print_info("等待所有线程准备就绪...")
        threads_ready_event.wait()
        
        # 如果有连接错误，检查是否还有足够的线程继续
        if connection_errors[0] > 0:
            active_threads = sum(1 for t in threads if t.is_alive())
            if active_threads == 0:
                print_error("所有线程都无法连接到数据库，终止测试")
                return {
                    "select": TestResult(
                        operation="CONCURRENT_SELECT",
                        total_time=0, avg_time=0, min_time=0, max_time=0,
                        ops_per_second=0, success_count=0, error_count=num_threads * operations_per_thread
                    ),
                    "update": TestResult(
                        operation="CONCURRENT_UPDATE",
                        total_time=0, avg_time=0, min_time=0, max_time=0,
                        ops_per_second=0, success_count=0, error_count=num_threads * operations_per_thread
                    ),
                    "insert": TestResult(
                        operation="CONCURRENT_INSERT",
                        total_time=0, avg_time=0, min_time=0, max_time=0,
                        ops_per_second=0, success_count=0, error_count=num_threads * operations_per_thread
                    )
                }
            else:
                print_warning(f"{connection_errors[0]} 个线程无法连接到数据库，将使用 {active_threads} 个线程继续测试")
        
        print_info(f"所有 {num_threads} 个线程已启动，开始执行并发测试...")
        
        # 记录开始时间并通知所有线程开始工作
        concurrent_start_time = time.time()
        start_event.set()
        
        # 等待所有线程完成
        with ProgressManager(total=len(threads), description="等待线程完成") as progress:
            for thread in threads:
                thread.join()
                progress.update()
        
        total_time = time.time() - concurrent_start_time
        
        # 关闭所有连接池连接
        for conn in connection_pool:
            try:
                if not conn.closed:
                    conn.close()
            except Exception as e:
                logger.warning(f"关闭连接池连接时出错: {e}")
        
        # 计算并发查询的统计数据
        results = {}
        
        # 处理SELECT结果
        if results_select["times"]:
            avg_time_select = sum(results_select["times"]) / len(results_select["times"])
            min_time_select = min(results_select["times"]) if results_select["times"] else 0
            max_time_select = max(results_select["times"]) if results_select["times"] else 0
            ops_per_second_select = results_select["success"] / total_time if total_time > 0 else 0
            
            results["select"] = TestResult(
                operation="CONCURRENT_SELECT",
                total_time=total_time,
                avg_time=avg_time_select,
                min_time=min_time_select,
                max_time=max_time_select,
                ops_per_second=ops_per_second_select,
                success_count=results_select["success"],
                error_count=results_select["error"]
            )
        else:
            results["select"] = TestResult(
                operation="CONCURRENT_SELECT",
                total_time=0, avg_time=0, min_time=0, max_time=0,
                ops_per_second=0, success_count=0, error_count=0
            )
            
        # 处理UPDATE结果
        if results_update["times"]:
            avg_time_update = sum(results_update["times"]) / len(results_update["times"])
            min_time_update = min(results_update["times"]) if results_update["times"] else 0
            max_time_update = max(results_update["times"]) if results_update["times"] else 0
            ops_per_second_update = results_update["success"] / total_time if total_time > 0 else 0
            
            results["update"] = TestResult(
                operation="CONCURRENT_UPDATE",
                total_time=total_time,
                avg_time=avg_time_update,
                min_time=min_time_update,
                max_time=max_time_update,
                ops_per_second=ops_per_second_update,
                success_count=results_update["success"],
                error_count=results_update["error"]
            )
        else:
            results["update"] = TestResult(
                operation="CONCURRENT_UPDATE",
                total_time=0, avg_time=0, min_time=0, max_time=0,
                ops_per_second=0, success_count=0, error_count=0
            )
            
        # 处理INSERT结果
        if results_insert["times"]:
            avg_time_insert = sum(results_insert["times"]) / len(results_insert["times"])
            min_time_insert = min(results_insert["times"]) if results_insert["times"] else 0
            max_time_insert = max(results_insert["times"]) if results_insert["times"] else 0
            ops_per_second_insert = results_insert["success"] / total_time if total_time > 0 else 0
            
            results["insert"] = TestResult(
                operation="CONCURRENT_INSERT",
                total_time=total_time,
                avg_time=avg_time_insert,
                min_time=min_time_insert,
                max_time=max_time_insert,
                ops_per_second=ops_per_second_insert,
                success_count=results_insert["success"],
                error_count=results_insert["error"]
            )
        else:
            results["insert"] = TestResult(
                operation="CONCURRENT_INSERT",
                total_time=0, avg_time=0, min_time=0, max_time=0,
                ops_per_second=0, success_count=0, error_count=0
            )
        
        # 保存并发测试结果
        self.report_data["concurrent_test"] = results
        
        # 计算总体统计
        total_ops = (results_select["success"] + results_update["success"] + results_insert["success"])
        total_errors = (results_select["error"] + results_update["error"] + results_insert["error"])
        total_ops_per_second = total_ops / total_time if total_time > 0 else 0
        
        print_success(f"并发测试完成，总时间: {total_time:.2f} 秒")
        print_info(f"总计成功操作: {total_ops}, 错误: {total_errors}")
        print_info(f"每秒操作数: {total_ops_per_second:.2f}")
        print_info(f"- 查询: 成功 {results_select['success']}, 失败 {results_select['error']}")
        print_info(f"- 更新: 成功 {results_update['success']}, 失败 {results_update['error']}")
        print_info(f"- 插入: 成功 {results_insert['success']}, 失败 {results_insert['error']}")
        
        return results
    
    def generate_report(self) -> Table:
        """Build the performance-test report table.

        Reads ``self.report_data`` (populated by the individual test runner
        methods) and renders one row per recorded result. When the tester
        captured a server version string, it is shown in the table caption.

        Returns:
            Table: a rich table ready for console rendering.
        """
        print_info("生成性能测试报告...")
        
        # Report skeleton: one row per operation, numeric columns right-aligned.
        report_table = Table(title="PostgreSQL性能测试报告", box=ROUNDED)
        report_table.add_column("操作类型", style="cyan")
        report_table.add_column("总时间(秒)", style="green", justify="right")
        report_table.add_column("平均时间(秒)", style="green", justify="right")
        report_table.add_column("每秒操作数", style="yellow", justify="right")
        report_table.add_column("成功", style="green", justify="right")
        report_table.add_column("失败", style="red", justify="right")
        
        # Sequential (single-connection) test results.
        for operation_name, result_key in [
            ("插入操作", "insert_test"),
            ("查询操作", "select_test"),
            ("更新操作", "update_test")
        ]:
            if result_key in self.report_data:
                result = self.report_data[result_key]
                report_table.add_row(
                    operation_name,
                    f"{result.total_time:.3f}",
                    f"{result.avg_time:.6f}",
                    f"{result.ops_per_second:.2f}",
                    str(result.success_count),
                    str(result.error_count)
                )
        
        # Index test: before/after query timings plus the improvement summary.
        if "index_test" in self.report_data:
            index_data = self.report_data["index_test"]
            before = index_data["before_index"]
            after = index_data["after_index"]
            improvement = index_data["improvement_ratio"]
            
            report_table.add_row(
                "索引前查询",
                "-",
                f"{before['avg_time']:.6f}",
                f"{before['ops_per_second']:.2f}",
                "-",
                "-"
            )
            
            report_table.add_row(
                "索引后查询",
                "-",
                f"{after['avg_time']:.6f}",
                f"{after['ops_per_second']:.2f}",
                "-",
                "-"
            )
            
            # BUGFIX: use the sign-aware format spec so a negative improvement
            # renders as "(-x.xx%)" instead of the previous "(+-x.xx%)";
            # positive values still render as "(+x.xx%)", identical to before.
            report_table.add_row(
                f"索引提升({improvement:+.2f}%)",
                f"{index_data['index_creation_time']:.3f}",
                "-",
                "-",
                "-",
                "-"
            )
        
        # Concurrent test results — one row per operation type, if present.
        if "concurrent_test" in self.report_data:
            concurrent_data = self.report_data["concurrent_test"]
            
            for row_label, op_key in [
                ("并发查询", "select"),
                ("并发更新", "update"),
                ("并发插入", "insert")
            ]:
                if op_key in concurrent_data:
                    result = concurrent_data[op_key]
                    report_table.add_row(
                        row_label,
                        f"{result.total_time:.3f}",
                        f"{result.avg_time:.6f}",
                        f"{result.ops_per_second:.2f}",
                        str(result.success_count),
                        str(result.error_count)
                    )
        
        # Caption with the server version (first comma-separated segment);
        # falls back to the placeholder when connect() never recorded one.
        version_info = "未知"
        if hasattr(self, 'version_info') and self.version_info:
            version_parts = self.version_info.split(",")
            if version_parts:
                version_info = version_parts[0].strip()
        
        report_table.caption = f"测试数据库: {version_info}"
            
        return report_table
    
    def display_report(self):
        """Generate the report table and print it, padded with blank lines."""
        # Build first so any progress messages appear before the padding.
        table = self.generate_report()
        console.print("\n")
        console.print(table)
        console.print("\n")

class PostgresqlPerformanceManager:
    """Menu-facing manager that drives the PostgreSQL performance-test workflow."""
    
    def __init__(self):
        """Create the manager with a fresh tester instance."""
        self.tester = PostgresqlPerformanceTester()
        
    def start_performance_test(self):
        """Run the full interactive performance-test session."""
        print_info("准备启动PostgreSQL性能测试...")
        
        # A working database connection is required before anything else.
        if not self._check_connection_settings():
            return
            
        # The dedicated test schema/table must exist before benchmarks run.
        if not self.tester.setup_test_environment():
            print_error("创建测试环境失败，中止测试")
            return
            
        try:
            self._run_test_suite()
        except KeyboardInterrupt:
            print_warning("\n测试被中断")
        except Exception as e:
            print_error(f"测试过程中出错: {e}")
            logger.exception("性能测试出错")
        finally:
            # Offer cleanup regardless of how the run ended.
            if confirm("是否清理测试环境?", True):
                self.tester.cleanup_test_environment()
            else:
                print_warning("测试环境未清理，测试数据仍保留在数据库中")
                print_info(f"可以使用以下SQL手动清理: DROP SCHEMA IF EXISTS {self.tester.test_schema} CASCADE;")
    
    def _run_test_suite(self):
        """Execute every benchmark in order, prompting for its parameters."""
        # Bulk insert benchmark.
        records = self._get_positive_int_input("请输入插入测试的记录数", 1000)
        batch_size = self._get_positive_int_input("请输入批量插入大小", 100)
        self.tester.run_insert_test(records, batch_size)
        
        # Point-query benchmark.
        self.tester.run_select_test(
            self._get_positive_int_input("请输入查询测试的迭代次数", 1000)
        )
        
        # Row-update benchmark.
        self.tester.run_update_test(
            self._get_positive_int_input("请输入更新测试的迭代次数", 1000)
        )
        
        # Optional before/after index comparison.
        if confirm("是否执行索引性能测试?", True):
            self.tester.run_index_performance_test()
        
        # Optional multi-threaded workload.
        if confirm("是否执行并发性能测试?", True):
            threads = self._get_positive_int_input("请输入并发线程数", 10)
            ops_per_thread = self._get_positive_int_input("请输入每线程的操作数", 100)
            self.tester.run_concurrent_test(threads, ops_per_thread)
        
        # Present the aggregated report.
        self.tester.display_report()
    
    def _check_connection_settings(self) -> bool:
        """Ensure an open database connection exists, prompting for one if needed."""
        existing = self.tester.conn
        if existing and not existing.closed:
            print_success("已连接到数据库")
            return True
            
        print_info("配置数据库连接...")
        host = get_input("请输入数据库主机", "localhost")
        raw_port = get_input("请输入数据库端口", "5432")
        dbname = get_input("请输入数据库名", "postgres")
        user = get_input("请输入用户名", "postgres")
        password = get_input("请输入密码(留空则无密码)")
        
        try:
            port = int(raw_port)
        except ValueError:
            # Fall back to the standard PostgreSQL port on bad input.
            print_error(f"无效的端口号 '{raw_port}'，使用默认值 5432")
            port = 5432
            
        print_info(f"正在连接到 {host}:{port}/{dbname} 数据库...")
        
        if not self.tester.connect(host, port, dbname, user, password or None):
            print_error("连接数据库失败")
            return False
        print_success("成功连接到数据库")
        return True
    
    def _get_positive_int_input(self, prompt: str, default: int) -> int:
        """Keep prompting until the user supplies an integer greater than zero."""
        while True:
            raw = get_input(prompt, str(default))
            try:
                value = int(raw)
            except ValueError:
                print_error("请输入有效的整数")
                continue
            if value > 0:
                return value
            print_error("请输入大于0的整数")

def main():
    """Module entry point: launch an interactive performance-test session."""
    PostgresqlPerformanceManager().start_performance_test()


if __name__ == "__main__":
    main()