#!/usr/bin/env python3
"""
性能测试脚本 - 对不同数据量进行性能测试
"""

import logging
import os
import sys
import time
from datetime import datetime

import numpy as np

# ========== Path setup ==========
# Make the project root (one directory above this file) importable so that
# the `core.*` and `utils.*` packages resolve when this script is run directly.
current_dir = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.abspath(os.path.join(current_dir, '..'))
if project_root not in sys.path:
    sys.path.insert(0, project_root)
# =================================

try:
    from core.numpy_searcher import NumpySearcher
    from core.vector_manager import VectorManager
    from utils.logger import setup_logger

    # 尝试导入PerformanceTester，如果不存在就创建
    try:
        from utils.performance import PerformanceTester
    except ImportError:
        print("PerformanceTester 不存在，正在创建...")


        class PerformanceTester:
            """Benchmark helper measuring search throughput and latency.

            Wraps a searcher object that exposes ``name_vectors`` (a 2-D
            array) and ``search_similar(names, specs, units, top_k)``.
            """

            def __init__(self, searcher):
                # Searcher under test; must expose name_vectors and search_similar.
                self.searcher = searcher

            def test_batch_sizes(self, batch_sizes=None):
                """Measure how the query batch size affects latency and QPS.

                Args:
                    batch_sizes: batch sizes to test (default [1, 10, 100, 1000]).

                Returns:
                    List of dicts with 'batch_size', 'time_seconds' and 'qps'.
                """
                if batch_sizes is None:
                    batch_sizes = [1, 10, 100, 1000]

                # Loop-invariant: derive query dimensionality once from the
                # loaded vectors so generated queries always match the index.
                vector_dim = self.searcher.name_vectors.shape[1]

                results = []
                for batch_size in batch_sizes:
                    # Generate random test queries for this batch size.
                    test_names = np.random.randn(batch_size, vector_dim)
                    test_specs = np.random.randn(batch_size, vector_dim)
                    test_units = ["个"] * batch_size

                    # time.perf_counter() is monotonic; datetime.now() can jump
                    # with wall-clock adjustments and skew benchmark numbers.
                    start_time = time.perf_counter()
                    self.searcher.search_similar(test_names, test_specs, test_units, top_k=5)
                    elapsed = time.perf_counter() - start_time

                    results.append({
                        'batch_size': batch_size,
                        'time_seconds': elapsed,
                        'qps': batch_size / elapsed if elapsed > 0 else 0
                    })

                return results

            def test_data_sizes(self, data_sizes=None, num_queries=10, vector_dim=768):
                """Measure search latency as the index size grows.

                Args:
                    data_sizes: index sizes to test (default up to 100k).
                    num_queries: queries issued per measurement (previously a
                        hard-coded 10).
                    vector_dim: dimensionality of the synthetic vectors
                        (previously a hard-coded 768).

                Returns:
                    List of dicts with 'data_size', 'time_seconds' and
                    'avg_query_time'.
                """
                if data_sizes is None:
                    data_sizes = [1000, 10000, 50000, 100000]

                results = []
                for data_size in data_sizes:
                    # Fixed query workload for every index size.
                    test_names = np.random.randn(num_queries, vector_dim)
                    test_specs = np.random.randn(num_queries, vector_dim)
                    test_units = ["个"] * num_queries

                    # Build a throwaway index of the requested size.
                    fake_ids = list(range(data_size))
                    fake_name_vecs = np.random.randn(data_size, vector_dim)
                    fake_spec_vecs = np.random.randn(data_size, vector_dim)
                    fake_units = ["个"] * data_size

                    temp_searcher = NumpySearcher()
                    temp_searcher.load_vectors(fake_ids, fake_name_vecs, fake_spec_vecs, fake_units)

                    start_time = time.perf_counter()
                    temp_searcher.search_similar(test_names, test_specs, test_units, top_k=5)
                    elapsed = time.perf_counter() - start_time

                    results.append({
                        'data_size': data_size,
                        'time_seconds': elapsed,
                        'avg_query_time': elapsed / num_queries
                    })

                return results

            def generate_report(self):
                """Run both benchmark suites and bundle their results."""
                return {
                    'batch_performance': self.test_batch_sizes(),
                    'data_size_performance': self.test_data_sizes(),
                    'test_time': datetime.now().isoformat()
                }

except ImportError as e:
    print(f"导入错误: {e}")
    print("正在使用备用方案...")

    # 备用方案：直接在这里定义所有需要的类
    import numpy as np
    from datetime import datetime
    import logging


    def setup_logger(name):
        """Return a named logger, attaching a stream handler on first use."""
        logger = logging.getLogger(name)
        # Guard clause: avoid stacking duplicate handlers on repeated calls.
        if logger.handlers:
            return logger
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(
            logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        )
        logger.addHandler(stream_handler)
        logger.setLevel(logging.INFO)
        return logger


    class NumpySearcher:
        """Minimal stand-in searcher used when the real implementation is missing."""

        def __init__(self):
            # Start with an empty index: no ids, vectors or units loaded.
            self.ids = []
            self.name_vectors = None
            self.spec_vectors = None
            self.units = []

        def load_vectors(self, ids, name_vectors, spec_vectors, units):
            """Store the supplied index data on this instance."""
            self.ids = ids
            self.name_vectors = name_vectors
            self.spec_vectors = spec_vectors
            self.units = units

        def search_similar(self, query_names, query_specs, query_units, top_k=5):
            """Simulated search: one empty hit list per query row."""
            return [[] for _ in query_names]


    class VectorManager:
        """Placeholder vector-store manager for the fallback path."""

        def __init__(self):
            # The fallback keeps no state.
            pass

        def load(self):
            """Always report failure: there are no persisted vectors to load."""
            return False


    class PerformanceTester:
        """Fallback tester that returns canned numbers so the script still runs."""

        def __init__(self, searcher):
            self.searcher = searcher

        def test_batch_sizes(self, batch_sizes=None):
            # Canned single measurement; real timings need the full implementation.
            return [dict(batch_size=1, time_seconds=0.1, qps=10)]

        def test_data_sizes(self, data_sizes=None):
            # Canned single measurement mirroring the real result shape.
            return [dict(data_size=1000, time_seconds=0.5, avg_query_time=0.05)]

        def generate_report(self):
            """Report that the script is running in degraded test mode."""
            return {'status': '测试模式', 'test_time': datetime.now().isoformat()}

# Module-level logger; uses whichever setup_logger implementation was bound above.
logger = setup_logger(__name__)


def main():
    """Entry point: run batch-size and data-size benchmarks and print a report."""
    logger.info("开始性能测试")

    separator = "=" * 50

    manager = VectorManager()
    if manager.load():
        # Real vectors are available; index them directly.
        # NOTE(review): assumes manager.vectors is (name_vecs, spec_vecs) — confirm.
        engine = NumpySearcher()
        engine.load_vectors(manager.ids, manager.vectors[0], manager.vectors[1], manager.units)
    else:
        logger.warning("无法加载向量库，使用模拟数据测试")
        # Fall back to a synthetic 1000 x 768 index.
        engine = NumpySearcher()
        mock_ids = list(range(1000))
        mock_name_vecs = np.random.randn(1000, 768)
        mock_spec_vecs = np.random.randn(1000, 768)
        mock_units = ["个"] * 1000
        engine.load_vectors(mock_ids, mock_name_vecs, mock_spec_vecs, mock_units)

    tester = PerformanceTester(engine)

    # Benchmark 1: how batch size affects throughput.
    logger.info("测试不同batch_size对性能的影响...")
    for entry in tester.test_batch_sizes([1, 10, 100]):
        logger.info(f"Batch size {entry['batch_size']}: "
                    f"{entry['time_seconds']:.3f}s, QPS: {entry['qps']:.1f}")

    # Benchmark 2: how index size affects per-query latency.
    logger.info("测试不同数据量下的性能...")
    for entry in tester.test_data_sizes([1000, 10000]):
        logger.info(f"Data size {entry['data_size']}: "
                    f"{entry['avg_query_time']:.4f}s per query")

    report = tester.generate_report()
    logger.info("性能测试完成")

    # Human-readable summary on stdout.
    print("\n" + separator)
    print("性能测试报告:")
    print(separator)

    if 'batch_performance' in report:
        print("\n1. 批量处理性能:")
        for entry in report['batch_performance']:
            print(f"   Batch size {entry['batch_size']}: {entry['time_seconds']:.3f}s (QPS: {entry['qps']:.1f})")

    if 'data_size_performance' in report:
        print("\n2. 数据量性能:")
        for entry in report['data_size_performance']:
            print(f"   Data size {entry['data_size']}: {entry['avg_query_time']:.4f}s/query")

    print(f"\n测试时间: {report.get('test_time', 'N/A')}")
    print(separator)


# Script entry point: run the benchmark suite when executed directly.
if __name__ == "__main__":
    main()