import csv
import numpy as np
from collections import defaultdict
import glob
import os

def _collect_batch_data(file_path):
    """Read one timings CSV and group metric values by batch size.

    Returns a dict mapping batch_size (int) to
    ``{'total_seq_length': [float, ...], 'verify_time_ms': [float, ...]}``.
    Rows with a missing batch_size are skipped; the two metrics are collected
    independently, so a row may contribute to only one of the lists.
    Raises FileNotFoundError if the file vanished since it was globbed.
    """
    batch_data = defaultdict(lambda: {'total_seq_length': [], 'verify_time_ms': []})
    with open(file_path, newline='') as f:
        for row in csv.DictReader(f):
            if not row.get('batch_size'):
                continue
            try:
                batch_size = int(row['batch_size'])
                if row.get('total_seq_length'):
                    batch_data[batch_size]['total_seq_length'].append(
                        float(row['total_seq_length']))
                if row.get('verify_time_ms'):
                    batch_data[batch_size]['verify_time_ms'].append(
                        float(row['verify_time_ms']))
            except (ValueError, KeyError):
                # Malformed numeric value in this row: skip the row, keep reading.
                continue
    return batch_data


def _print_metric_stats(values, label):
    """Print count/min/max/range/mean/median/std for one metric.

    Returns the value range (max - min, a float) or None when *values* is
    empty. The printed layout matches the original report format exactly.
    """
    if len(values) == 0:
        print(f"  {label}: 无数据")
        return None
    arr = np.array(values)
    lo = np.min(arr)
    hi = np.max(arr)
    value_range = hi - lo
    print(f"  {label}:")
    print(f"    数据点数量: {len(arr):4d}")
    print(f"    最小值:     {lo:8.2f}")
    print(f"    最大值:     {hi:8.2f}")
    print(f"    变化范围:   {value_range:8.2f}")
    print(f"    平均值:     {np.mean(arr):8.2f}")
    print(f"    中位数:     {np.median(arr):8.2f}")
    print(f"    标准差:     {np.std(arr):8.2f}")
    return value_range


def analyze_ranges_by_batch_size():
    """For every ``timings_all_setting_rlhf_*.csv`` in the current directory,
    report per-batch-size ranges of total_seq_length and verify_time_ms, and
    the ratio (verify_time_ms range) / (total_seq_length range).

    Output is printed to stdout; returns None.
    """
    print("=== 分析每个timings文件中每个batch size下的变化范围 ===\n")

    # Find all timings files in the current directory.
    timings_files = glob.glob("timings_all_setting_rlhf_*.csv")
    if not timings_files:
        print("未找到timings文件，请确保文件在当前目录下")
        return

    print(f"找到 {len(timings_files)} 个timings文件: {sorted(timings_files)}\n")

    for file_path in sorted(timings_files):
        print(f"=== 分析文件: {file_path} ===\n")
        print(f"正在读取文件: {file_path}")

        # EAFP: read directly and handle disappearance, instead of the old
        # racy pre-check that opened every file twice.
        try:
            batch_data = _collect_batch_data(file_path)
        except FileNotFoundError:
            print(f"文件 {file_path} 不存在，跳过")
            continue

        print(f"\n=== {file_path} 中每个batch size的变化范围统计 ===\n")

        # Remember each range so the summary below needn't rescan raw data.
        seq_ranges = {}
        verify_ranges = {}

        for batch_size in sorted(batch_data.keys()):
            print(f"Batch Size {batch_size:2d}:")

            seq_range = _print_metric_stats(
                batch_data[batch_size]['total_seq_length'], 'total_seq_length')
            verify_range = _print_metric_stats(
                batch_data[batch_size]['verify_time_ms'], 'verify_time_ms')
            seq_ranges[batch_size] = seq_range
            verify_ranges[batch_size] = verify_range

            # Ratio is defined only when both metrics have data and the
            # sequence-length range is non-zero (avoid division by zero).
            if seq_range is not None and verify_range is not None and seq_range != 0:
                ratio = verify_range / seq_range
                print(f"  verify_time_ms变化范围/total_seq_length变化范围: {ratio:.6f}")
            else:
                print(f"  verify_time_ms变化范围/total_seq_length变化范围: 无法计算")

            print()

        # Per-file summary.
        print(f"=== {file_path} 总结统计 ===\n")

        all_batch_sizes = sorted(batch_data.keys())
        print(f"总共分析了 {len(all_batch_sizes)} 个不同的batch size: {all_batch_sizes}")

        # Locate the batch size with the widest range for each metric,
        # reusing the ranges computed above. Strict '>' against an initial 0
        # means an all-zero-range file reports nothing, as before.
        max_seq_range_batch, max_seq_range = None, 0
        max_verify_range_batch, max_verify_range = None, 0
        for batch_size in all_batch_sizes:
            r = seq_ranges.get(batch_size)
            if r is not None and r > max_seq_range:
                max_seq_range, max_seq_range_batch = r, batch_size
            r = verify_ranges.get(batch_size)
            if r is not None and r > max_verify_range:
                max_verify_range, max_verify_range_batch = r, batch_size

        if max_seq_range_batch is not None:
            print(f"total_seq_length变化范围最大的batch size: {max_seq_range_batch} (范围: {max_seq_range:.2f})")

        if max_verify_range_batch is not None:
            print(f"verify_time_ms变化范围最大的batch size: {max_verify_range_batch} (范围: {max_verify_range:.2f})")

        print("\n" + "="*80 + "\n")

# Run the analysis when executed as a script (no-op on import).
if __name__ == "__main__":
    analyze_ranges_by_batch_size() 