import pandas as pd
import matplotlib.pyplot as plt
import os
import glob
import numpy as np

# Walks the given folders, looking for a log-0_processed.csv file in each.
# Filters rows with comm_type in [50, 60], groups them by (comm_type, total_size)
# and derives timing statistics (appearance_time, comm_time), then writes the
# grouped table to <results_dir>/<folder_basename>_stats.csv.
def get_total_size_count(folders, results_dir):
    """Aggregate per-folder communication statistics into CSV tables.

    For each folder in *folders*, reads ``log-0_processed.csv`` (if present),
    keeps rows whose ``comm_type`` lies in [50, 60], groups them by
    ``(comm_type, total_size)``, derives timing statistics, and writes the
    grouped table to ``<results_dir>/<folder_basename>_stats.csv``.

    Args:
        folders: iterable of directory paths to scan for input CSVs.
        results_dir: output directory; created if it does not exist.

    Notes:
        Folders whose input file is missing are skipped with a message.
        Any per-folder failure is reported and processing continues with
        the next folder (best-effort batch behavior).
    """
    os.makedirs(results_dir, exist_ok=True)

    for folder_path in folders:
        try:
            print(f"Processing {folder_path}...")

            # Normalize mixed-slash Windows paths, then check for the single
            # expected input file directly (avoids glob's trouble with
            # special characters such as '[' and ']' in path names).
            folder_path = os.path.normpath(folder_path)
            csv_file_path = os.path.join(folder_path, "log-0_processed.csv")
            if not os.path.isfile(csv_file_path):
                # Skip explicitly instead of failing later on an empty frame.
                print(f"No processed CSV files found in {folder_path}")
                continue
            print(f"Found CSV files: {[csv_file_path]}")

            all_data = pd.read_csv(csv_file_path)

            # Keep only communication types 50..60 (inclusive bounds).
            filtered_data = all_data[all_data['comm_type'].between(50, 60)]

            # Per (comm_type, total_size) statistics. Note avg_comm_time is
            # the per-call mean, which equals total_real_comm_time / count.
            grouped_data = filtered_data.groupby(['comm_type', 'total_size']).agg(
                mean_appearance_time=('appearance_time', 'mean'),
                count=('appearance_time', 'count'),
                avg_comm_time=('comm_time(us)', 'mean'),
                avg_median_comm_time=('comm_time(us)', 'median'),
                total_real_comm_time=('comm_time(us)', 'sum')
            ).reset_index()

            # Estimated total time if every call in the group took the
            # group's median duration: median * number of occurrences.
            grouped_data['median_comm_time'] = (
                grouped_data['avg_median_comm_time'] * grouped_data['count']
            )

            # Each group's share (%) of the overall measured comm time.
            total_real_comm_time_sum = grouped_data['total_real_comm_time'].sum()
            grouped_data['percentage'] = (
                grouped_data['total_real_comm_time'] / total_real_comm_time_sum * 100
            )

            # Persist the per-folder statistics table.
            stats_filename = os.path.join(
                results_dir, f"{os.path.basename(folder_path)}_stats.csv"
            )
            grouped_data.to_csv(stats_filename, index=False)
            print(f"Saved statistics to {stats_filename}")

        except Exception as e:
            # Report and move on: one bad folder must not abort the batch.
            print(f"Error processing {folder_path}: {str(e)}")
            continue

    print("\nAnalysis completed!")

if __name__ == "__main__":
    # Active dataset: small-nodes runs of job 3493299, warm-up sweep 10..50.
    base = "f:/PostGraduate/Point-to-Point-DATA/small-nodes"
    folders = [f"{base}/3493299-2node-32proc-0-w-{w}" for w in (10, 20, 30, 40, 50)]

    # Other dataset groups (uncomment / extend `folders` as needed):
    #   tiny-nodes:
    #     .../tiny-nodes-by-hzpProfile/3439153-1node-8proc-0-w-{50,100}
    #     .../tiny-nodes-by-hzpProfile/3458116-2node-32proc-0-w-{50,100}
    #   small-nodes, 64 atoms (job 3413576):
    #     .../small-nodes/lammps-3413576-{4node-64,8node-128,16node-256,32node-512}proc-64-{10,20,30,40,50}
    #   small-nodes, 32 atoms (job 3414147):
    #     .../small-nodes/lammps-3414147-{4node-64,8node-128,16node-256,32node-512}proc-32-{10,20,30,40,50}
    #   small-nodes, 16 atoms (job 3415271):
    #     .../small-nodes/lammps-3415271-{4node-64,8node-128,16node-256,32node-512}proc-16-{10,20,30,40,50}
    #   big-nodes (LAMMPS):
    #     .../big-nodes/LAMMPS/3427930-1024node-16384proc-512-s-{50,100}
    #     .../big-nodes/LAMMPS/3427948-512node-8192proc-512-s-{50,100}
    #     .../big-nodes/LAMMPS/3427970-256node-4096proc-512-s-{50,100}
    #     .../big-nodes/LAMMPS/3427997-128node-2048proc-512-s-{50,100}
    #     .../big-nodes/LAMMPS/3429036-64node-1024proc-256-s-{50,100}
    #     .../big-nodes/LAMMPS/3429422-32node-512proc-128-s-{50,100}

    # Directory that receives the per-folder *_stats.csv outputs.
    results_dir = r"F:\PostGraduate\Point-to-Point-DATA\deal-data-code\C-lop-Prediction\analysis_results"

    get_total_size_count(folders, results_dir)