#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
性能基准测试
测试系统在不同负载下的性能表现
"""

import json
import os
import shutil
import sys
import tempfile
import threading
import time
import unittest
from concurrent.futures import ThreadPoolExecutor, as_completed
from unittest.mock import patch

import psutil

# Make the src package importable when running the tests directly.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from src.main import MigrationTool
from src.performance import PerformanceProfiler, performance_profiler


class TestPerformanceBenchmark(unittest.TestCase):
    """Performance benchmarks for MigrationTool.

    Each test generates a synthetic MySQL dump in a scratch directory,
    runs the migration, and checks wall-clock time and resident-memory
    growth against fixed budgets. Timings use a monotonic clock
    (``time.perf_counter``) so system clock adjustments cannot skew them.
    """

    def setUp(self):
        # Fresh tool instance and scratch directory per test for isolation.
        self.migration_tool = MigrationTool()
        self.temp_dir = tempfile.mkdtemp()

    def tearDown(self):
        # Best-effort cleanup of generated SQL/report files.
        shutil.rmtree(self.temp_dir, ignore_errors=True)

    @staticmethod
    def _rss_mb():
        """Return the current process resident set size in MiB."""
        return psutil.Process().memory_info().rss / 1024 / 1024

    def test_small_file_performance(self):
        """Small workload: 10 tables, two rows each; full validation + reports."""
        test_file = os.path.join(self.temp_dir, "small_test.sql")
        with open(test_file, 'w', encoding='utf-8') as f:
            f.write("CREATE DATABASE small_test;\nUSE small_test;\n")
            for i in range(10):
                f.write(f"""
CREATE TABLE table_{i} (
    id INT AUTO_INCREMENT PRIMARY KEY,
    name VARCHAR(100) NOT NULL,
    email VARCHAR(255),
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
INSERT INTO table_{i} (name, email) VALUES 
    ('User{i}_1', 'user{i}_1@test.com'),
    ('User{i}_2', 'user{i}_2@test.com');
""")

        output_file = os.path.join(self.temp_dir, "small_test_kwdb.sql")

        # Measure wall time (monotonic clock) and RSS growth.
        start_time = time.perf_counter()
        start_memory = self._rss_mb()

        success = self.migration_tool.migrate(
            input_file=test_file,
            output_file=output_file,
            validate=True,
            generate_reports=True
        )

        process_time = time.perf_counter() - start_time
        memory_usage = self._rss_mb() - start_memory

        self.assertTrue(success, "小文件迁移应该成功")
        self.assertTrue(os.path.exists(output_file), "输出文件应该存在")

        print(f"\n小文件性能指标:")
        print(f"  处理时间: {process_time:.2f}秒")
        print(f"  内存使用: {memory_usage:.1f}MB")
        print(f"  文件大小: {os.path.getsize(test_file) / 1024:.1f}KB")

        # Budgets chosen to be loose enough not to flake on slow CI hosts.
        self.assertLess(process_time, 10, f"小文件处理时间应该小于10秒，实际{process_time:.2f}秒")
        self.assertLess(memory_usage, 100, f"内存使用应该小于100MB，实际{memory_usage:.1f}MB")

    def test_medium_file_performance(self):
        """Medium workload: 100 indexed tables, batched INSERTs on every tenth."""
        test_file = os.path.join(self.temp_dir, "medium_test.sql")
        with open(test_file, 'w', encoding='utf-8') as f:
            f.write("CREATE DATABASE medium_test;\nUSE medium_test;\n")
            for i in range(100):
                f.write(f"""
CREATE TABLE table_{i:03d} (
    id INT AUTO_INCREMENT PRIMARY KEY,
    name VARCHAR(100) NOT NULL,
    description TEXT,
    status ENUM('active', 'inactive') DEFAULT 'active',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    INDEX idx_name (name),
    INDEX idx_status (status)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
""")

                # Every 10th table also gets a 20-row batched INSERT.
                if i % 10 == 0:
                    f.write(f"INSERT INTO table_{i:03d} (name, description) VALUES ")
                    values = [
                        f"('Name_{i}_{j}', 'Description for {i}_{j}')"
                        for j in range(20)
                    ]
                    f.write(", ".join(values) + ";\n")

        output_file = os.path.join(self.temp_dir, "medium_test_kwdb.sql")

        start_time = time.perf_counter()
        start_memory = self._rss_mb()

        success = self.migration_tool.migrate(
            input_file=test_file,
            output_file=output_file,
            validate=True,
            generate_reports=False  # skip report generation to save time
        )

        process_time = time.perf_counter() - start_time
        memory_usage = self._rss_mb() - start_memory

        self.assertTrue(success, "中等文件迁移应该成功")
        self.assertTrue(os.path.exists(output_file), "输出文件应该存在")

        file_size_mb = os.path.getsize(test_file) / 1024 / 1024
        # Guard against a zero-length interval on very fast runs.
        throughput = file_size_mb / process_time if process_time > 0 else float('inf')

        print(f"\n中等文件性能指标:")
        print(f"  处理时间: {process_time:.2f}秒")
        print(f"  内存使用: {memory_usage:.1f}MB")
        print(f"  文件大小: {file_size_mb:.1f}MB")
        print(f"  处理速度: {throughput:.1f}MB/s")

        self.assertLess(process_time, 30, f"中等文件处理时间应该小于30秒，实际{process_time:.2f}秒")
        self.assertLess(memory_usage, 200, f"内存使用应该小于200MB，实际{memory_usage:.1f}MB")

    def test_concurrent_processing_performance(self):
        """Five files migrated by a 3-worker pool must beat serial wall time."""
        # Generate five independent small dumps.
        test_files = []
        for i in range(5):
            test_file = os.path.join(self.temp_dir, f"concurrent_test_{i}.sql")
            with open(test_file, 'w', encoding='utf-8') as f:
                f.write(f"CREATE DATABASE concurrent_test_{i};\nUSE concurrent_test_{i};\n")
                for j in range(20):
                    f.write(f"""
CREATE TABLE table_{j} (
    id INT AUTO_INCREMENT PRIMARY KEY,
    data VARCHAR(255) DEFAULT 'test_data_{i}_{j}'
) ENGINE=InnoDB;
INSERT INTO table_{j} (data) VALUES ('sample_{i}_{j}');
""")
            test_files.append(test_file)

        def process_file(input_file):
            """Migrate one file with a private tool instance; return (ok, seconds)."""
            output_file = input_file.replace('.sql', '_kwdb.sql')
            tool = MigrationTool()  # one instance per worker: no shared state
            started = time.perf_counter()
            success = tool.migrate(
                input_file=input_file,
                output_file=output_file,
                validate=False,  # skip validation for speed
                generate_reports=False
            )
            return success, time.perf_counter() - started

        start_time = time.perf_counter()
        start_memory = self._rss_mb()

        with ThreadPoolExecutor(max_workers=3) as executor:
            futures = [executor.submit(process_file, file) for file in test_files]
            results = [future.result() for future in as_completed(futures)]

        concurrent_time = time.perf_counter() - start_time
        end_memory = self._rss_mb()

        successful_count = sum(1 for success, _ in results if success)
        # Sum of per-file times approximates what a serial run would cost.
        total_process_time = sum(elapsed for _, elapsed in results)

        print(f"\n并发处理性能指标:")
        print(f"  成功处理: {successful_count}/{len(test_files)}个文件")
        print(f"  并发总时间: {concurrent_time:.2f}秒")
        print(f"  串行总时间: {total_process_time:.2f}秒")
        print(f"  并发效率: {total_process_time/concurrent_time:.1f}x")
        print(f"  内存使用: {end_memory - start_memory:.1f}MB")

        self.assertGreaterEqual(successful_count, 4, "应该至少成功处理4个文件")
        self.assertLess(concurrent_time, total_process_time * 0.8, "并发处理应该比串行快")

    def test_memory_efficiency(self):
        """Peak RSS growth must stay below 3x the input file size."""
        # Memory-heavy workload: 50 tables, each with a 100-row wide INSERT.
        test_file = os.path.join(self.temp_dir, "memory_test.sql")
        with open(test_file, 'w', encoding='utf-8') as f:
            f.write("CREATE DATABASE memory_test;\nUSE memory_test;\n")

            for i in range(50):
                f.write(f"""
CREATE TABLE large_table_{i} (
    id INT AUTO_INCREMENT PRIMARY KEY,
    data1 VARCHAR(1000),
    data2 TEXT,
    data3 LONGTEXT
) ENGINE=InnoDB;
""")

                f.write(f"INSERT INTO large_table_{i} (data1, data2, data3) VALUES ")
                rows = []
                for j in range(100):
                    data1 = f"'{'x' * 100}_{i}_{j}'"
                    data2 = f"'{'y' * 500}_{i}_{j}'"
                    data3 = f"'{'z' * 1000}_{i}_{j}'"
                    rows.append(f"({data1}, {data2}, {data3})")
                f.write(", ".join(rows) + ";\n")

        output_file = os.path.join(self.temp_dir, "memory_test_kwdb.sql")

        process = psutil.Process()
        start_memory = process.memory_info().rss / 1024 / 1024
        peak_memory = start_memory
        stop_sampling = threading.Event()

        def monitor_memory():
            """Sample RSS every 100ms until signalled, tracking the peak."""
            nonlocal peak_memory
            while not stop_sampling.is_set():
                try:
                    current = process.memory_info().rss / 1024 / 1024
                except psutil.Error:
                    # Process queries can fail during interpreter teardown.
                    break
                peak_memory = max(peak_memory, current)
                stop_sampling.wait(0.1)

        monitor_thread = threading.Thread(target=monitor_memory, daemon=True)
        monitor_thread.start()

        start_time = time.perf_counter()
        try:
            success = self.migration_tool.migrate(
                input_file=test_file,
                output_file=output_file,
                validate=False,
                generate_reports=False
            )
        finally:
            # Stop and join the sampler (the old version spun forever).
            stop_sampling.set()
            monitor_thread.join(timeout=2)
        elapsed = time.perf_counter() - start_time

        file_size_mb = os.path.getsize(test_file) / 1024 / 1024
        memory_usage = peak_memory - start_memory
        # Avoid ZeroDivisionError when RSS growth is not measurable.
        efficiency = file_size_mb / memory_usage if memory_usage > 0 else float('inf')

        print(f"\n内存效率指标:")
        print(f"  文件大小: {file_size_mb:.1f}MB")
        print(f"  峰值内存: {memory_usage:.1f}MB")
        print(f"  内存效率: {efficiency:.2f} (文件大小/内存使用)")
        print(f"  处理时间: {elapsed:.2f}秒")

        self.assertTrue(success, "大文件迁移应该成功")
        self.assertLess(memory_usage, file_size_mb * 3, "内存使用不应该超过文件大小的3倍")

    def test_performance_profiler_integration(self):
        """A migration with reports enabled must emit a JSON performance file."""
        test_file = os.path.join(self.temp_dir, "profiler_test.sql")
        with open(test_file, 'w', encoding='utf-8') as f:
            f.write("""
CREATE DATABASE profiler_test;
USE profiler_test;
CREATE TABLE users (
    id INT AUTO_INCREMENT PRIMARY KEY,
    name VARCHAR(100),
    email VARCHAR(255)
) ENGINE=InnoDB;
INSERT INTO users (name, email) VALUES 
    ('User1', 'user1@test.com'),
    ('User2', 'user2@test.com');
""")

        output_file = os.path.join(self.temp_dir, "profiler_test_kwdb.sql")

        # Performance monitoring is enabled implicitly by migrate().
        success = self.migration_tool.migrate(
            input_file=test_file,
            output_file=output_file,
            validate=True,
            generate_reports=True
        )
        self.assertTrue(success, "迁移应该成功")

        # The tool writes the profile next to the output file.
        performance_file = output_file.replace('.sql', '_performance.json')
        self.assertTrue(os.path.exists(performance_file), "性能报告文件应该存在")

        with open(performance_file, 'r', encoding='utf-8') as f:
            performance_data = json.load(f)

        self.assertIn('total_time', performance_data, "应该包含总时间")
        self.assertIn('peak_memory', performance_data, "应该包含峰值内存")
        self.assertGreater(performance_data['total_time'], 0, "总时间应该大于0")
        self.assertGreater(performance_data['peak_memory'], 0, "峰值内存应该大于0")

        print(f"\n性能分析器集成测试:")
        print(f"  总时间: {performance_data['total_time']:.2f}秒")
        print(f"  峰值内存: {performance_data['peak_memory']:.1f}MB")


if __name__ == '__main__':
    # Run the performance benchmark suite directly with verbose test output.
    unittest.main(verbosity=2)
