#!/usr/bin/env python3
"""
基金净值数据库性能测试脚本
测试优化后的插入和查询性能
"""

import time
import sys
import os
from datetime import datetime, timedelta
from typing import List, Dict, Any

# Ensure sibling modules (fund_nav_scraper, config) resolve even when this
# script is launched from a different working directory.
current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, current_dir)

from fund_nav_scraper import FundNavScraper
from config import get_database_config

def generate_test_data(
    fund_code: str,
    record_count: int,
    base_date: datetime = datetime(2024, 1, 1),
) -> List[Dict[str, Any]]:
    """Generate synthetic consecutive daily NAV records for performance tests.

    Args:
        fund_code: Fund identifier stamped on every generated record.
        record_count: Number of daily records to create.
        base_date: Date of the first record; each following record advances
            by one day. Defaults to 2024-01-01 (previous hard-coded value),
            so existing callers are unaffected.

    Returns:
        List of record dicts matching the scraper's database row schema.
    """
    records: List[Dict[str, Any]] = []
    for day_offset in range(record_count):
        nav_date = base_date + timedelta(days=day_offset)
        # Unit and accumulated NAV grow linearly so values are deterministic.
        nav_value = 1.0000 + day_offset * 0.0001
        records.append({
            'fund_code': fund_code,
            'nav_date': nav_date.strftime('%Y-%m-%d'),
            'unit_net_value': nav_value,
            'accumulated_net_value': nav_value,
            # Alternate signs so growth-rate handling sees both directions.
            'daily_growth_rate': 0.01 if day_offset % 2 == 0 else -0.01,
            'purchase_status': '开放申购',
            'redemption_status': '开放赎回',
            'dividend_info': None,
        })
    return records

def test_insert_performance(scraper: "FundNavScraper"):
    """Benchmark bulk inserts at several batch sizes and verify rows landed.

    For each batch size, generates synthetic records under a distinct test
    fund code, times the insert, and queries a few rows back to confirm
    the data is retrievable.

    Args:
        scraper: Initialized FundNavScraper connected to the test database.
    """
    print("=" * 60)
    print("插入性能测试")
    print("=" * 60)

    test_cases = [
        (100, "小批量"),
        (1000, "中批量"),
        (5000, "大批量")
    ]

    for record_count, description in test_cases:
        print(f"\n{description}插入测试 ({record_count} 条记录):")

        # Distinct fund code per batch size so each run verifies its own rows.
        test_fund_code = f"TEST{record_count:04d}"
        test_data = generate_test_data(test_fund_code, record_count)

        # perf_counter() is monotonic and high-resolution; time.time() is
        # wall-clock and can jump on NTP adjustments, skewing benchmarks.
        start_time = time.perf_counter()
        result = scraper.save_to_database(test_data)
        total_time = time.perf_counter() - start_time

        speed = record_count / total_time if total_time > 0 else 0

        print(f"  结果: {'成功' if result else '失败'}")
        print(f"  耗时: {total_time:.3f}秒")
        print(f"  速度: {speed:.0f}条/秒")

        # Spot-check: the just-inserted fund code must be queryable.
        verify_result = scraper.query_fund_nav_by_code(test_fund_code, limit=5)
        if verify_result['success']:
            print(f"  验证: 成功查询到{len(verify_result['data'])}条记录")
        else:
            print(f"  验证: 失败 - {verify_result.get('error', '未知错误')}")
def test_query_performance(scraper: FundNavScraper):
    """测试查询性能"""
    print("\n" + "=" * 60)
    print("查询性能测试")
    print("=" * 60)
    
    test_cases = [
        {
            'name': '单基金查询',
            'params': {'fund_code': '000001', 'page_size': 100},
            'description': '查询单个基金的历史数据'
        },
        {
            'name': '日期范围查询',
            'params': {'start_date': '2024-01-01', 'end_date': '2024-01-31', 'page_size': 200},
            'description': '查询指定日期范围的数据'
        },
        {
            'name': '基金+日期查询',
            'params': {'fund_code': '000001', 'start_date': '2024-01-01', 'end_date': '2024-12-31', 'page_size': 300},
            'description': '查询指定基金在指定日期范围的数据'
        },
        {
            'name': '大页面查询',
            'params': {'fund_code': '000001', 'page_size': 1000},
            'description': '查询大页面数据'
        }
    ]
    
    for test_case in test_cases:
        print(f"\n{test_case['name']}:")
        print(f"  描述: {test_case['description']}")
        
        # 执行查询测试
        start_time = time.time()
        result = scraper.query_fund_nav_history(**test_case['params'])
        end_time = time.time()
        
        total_time = end_time - start_time
        
        print(f"  结果: {'成功' if result['success'] else '失败'}")
        print(f"  耗时: {total_time:.3f}秒")
        
        if result['success']:
            print(f"  返回记录数: {len(result['data'])}")
            print(f"  总记录数: {result['total']}")
            if total_time > 0:
                print(f"  查询速度: {len(result['data']) / total_time:.0f}条/秒")
        else:
            print(f"  错误: {result.get('error', '未知错误')}")

def test_fund_codes_filtering(scraper: FundNavScraper):
    """测试基金代码过滤功能"""
    print("\n" + "=" * 60)
    print("基金代码过滤测试")
    print("=" * 60)
    
    # 测试获取基金代码（带过滤）
    print("\n测试fund_download_status过滤功能:")
    
    start_time = time.time()
    fund_codes = scraper.get_fund_codes_from_db(limit=50)
    end_time = time.time()
    
    print(f"  获取耗时: {end_time - start_time:.3f}秒")
    print(f"  获取基金数量: {len(fund_codes)}")
    print(f"  前10个基金代码: {fund_codes[:10]}")
    
    # 测试缓存功能
    print("\n测试缓存功能:")
    start_time = time.time()
    cached_fund_codes = scraper.get_fund_codes_from_db(limit=50)
    end_time = time.time()
    
    print(f"  缓存获取耗时: {end_time - start_time:.3f}秒")
    print(f"  缓存一致性: {'通过' if fund_codes == cached_fund_codes else '失败'}")

def main():
    """Entry point: run the full database performance suite in order.

    Builds a scraper from the configured database settings, then runs the
    filtering, insert, and query benchmarks. Any exception is caught at
    this top-level boundary, reported, and its traceback printed.
    """
    print("基金净值数据库性能测试")
    print(f"测试时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")

    try:
        scraper = FundNavScraper(get_database_config())

        # Run each benchmark against the same scraper instance.
        for run_test in (
            test_fund_codes_filtering,
            test_insert_performance,
            test_query_performance,
        ):
            run_test(scraper)

        print("\n" + "=" * 60)
        print("性能测试完成")
        print("=" * 60)

    except Exception as e:
        print(f"测试过程中发生错误: {str(e)}")
        import traceback
        traceback.print_exc()

# Run the suite only when executed as a script, not when imported.
if __name__ == '__main__':
    main()