"""
计算新数据配置下的容量
"""
from config import Config
from data_loader import DataLoader

def calculate_new_capacity():
    """Compute and report data capacity under the new sliding-window configuration.

    Prints a comparison between the original fixed-window configuration
    (1100 files, batch size 20) and the current sliding-window configuration,
    lists train/test split options, and prints a recommended final setup.

    Returns:
        tuple[int, int]: (recommended_train_batches, recommended_test_batches).
    """
    config = Config()

    print("=== 新数据配置分析 ===")
    print(f"总文件数: {config.TOTAL_DATA}")
    print(f"批次大小: {config.BATCH_SIZE}")
    print(f"序列长度: {config.SEQ_LEN}")
    print(f"滑动步长: {config.SLIDING_STEP}")

    # Build the loader so we can query its sliding-window bookkeeping.
    data_loader = DataLoader(config, sliding_step=config.SLIDING_STEP)

    # Print the loader's own detailed sliding-window report.
    data_loader.print_sliding_info()

    # Actual capacity available under the current configuration.
    max_batches = data_loader.get_max_batches()
    total_sequences = data_loader.get_total_sequences()

    print("\n=== 数据容量对比 ===")

    # Baseline: the original non-sliding configuration.
    original_total_data = 1100
    original_batch_size = 20
    original_sequences = original_total_data // config.SEQ_LEN

    # Labels use the actual baseline variables instead of hard-coded numbers,
    # so the report stays truthful if the baseline constants change.
    print(f"原始配置 ({original_total_data}文件, 批次大小{original_batch_size}):")
    print(f"  原始序列数: {original_sequences}")
    print(f"  原始批次数: {original_sequences // original_batch_size}")
    print(f"  原始训练样本: {original_sequences}")

    # Same for the new-configuration label: reflect live config values.
    print(f"\n新配置 ({config.TOTAL_DATA}文件, 批次大小{config.BATCH_SIZE}, "
          f"滑动步长{config.SLIDING_STEP}):")
    print(f"  滑动窗口序列数: {total_sequences}")
    print(f"  总批次数: {max_batches}")
    # Guard: SEQ_LEN > original_total_data makes the baseline 0 sequences,
    # which previously raised ZeroDivisionError here.
    if original_sequences > 0:
        print(f"  数据增强倍数: {total_sequences / original_sequences:.1f}x")
    else:
        print("  数据增强倍数: N/A (原始序列数为0)")

    # Candidate held-out fractions for the train/test split suggestions.
    test_ratios = [0.1, 0.15, 0.2]

    print("\n=== 训练/测试分割建议 ===")

    for ratio in test_ratios:
        # Keep at least 2 test batches so evaluation is never degenerate.
        test_batches = max(2, int(max_batches * ratio))
        train_batches = max_batches - test_batches
        train_samples = train_batches * config.BATCH_SIZE
        test_samples = test_batches * config.BATCH_SIZE

        print(f"\n测试比例 {ratio*100:.0f}%:")
        print(f"  训练批次: {train_batches}")
        print(f"  测试批次: {test_batches}")
        print(f"  训练样本数: {train_samples}")
        print(f"  测试样本数: {test_samples}")

    # Recommended split: hold out 15% of batches (minimum 2) for testing.
    recommended_test_ratio = 0.15
    recommended_test_batches = max(2, int(max_batches * recommended_test_ratio))
    recommended_train_batches = max_batches - recommended_test_batches

    print("\n=== 推荐最终配置 ===")
    print(f"TOTAL_DATA = {config.TOTAL_DATA}")
    print(f"BATCH_SIZE = {config.BATCH_SIZE}")
    print(f"SLIDING_STEP = {config.SLIDING_STEP}")
    print(f"MAX_TRAIN_BATCHES = {recommended_train_batches}")
    print(f"TEST_BATCHES = {recommended_test_batches}")
    print("")
    print(f"训练样本总数: {recommended_train_batches * config.BATCH_SIZE}")
    print(f"测试样本总数: {recommended_test_batches * config.BATCH_SIZE}")
    print(f"总样本数: {(recommended_train_batches + recommended_test_batches) * config.BATCH_SIZE}")

    # Growth vs. the original configuration (guarded like the ratio above).
    if original_sequences > 0:
        improvement_factor = (recommended_train_batches * config.BATCH_SIZE) / original_sequences
        print(f"\n训练数据增长: {improvement_factor:.1f}倍")
    else:
        print("\n训练数据增长: N/A (原始序列数为0)")

    return recommended_train_batches, recommended_test_batches

if __name__ == "__main__":
    # Run the capacity report; the returned split counts are not used here.
    _train, _test = calculate_new_capacity()