# Li -- compute the annual mean capacity factor (cf) per site, per year, for every
# sub-folder of CMIP6 history data, and save one augmented site table per folder. (final version)
import os
import pandas as pd
import csv

# 指定输入文件夹路径
# Input: one sub-folder per model, each holding per-site/year CSVs named
# "<prefix>_<site>_<year>.csv" with cf_80 / cf_100 / cf_150 columns.
input_folder = r"F:\data\CMIP6\cf_history"
result_folder = r"F:\data\CMIP6\history_ave"
# Site table that the averaged cf columns get appended to. Hoisted out of the
# loop (it is a constant); it is still RE-READ once per folder so every
# model's output starts from the pristine, unmodified table.
site_table_file = r"F:\data\CMIP6\all8_site_without_cf_select.csv"

# Ensure the output folder exists before writing into it — the original
# crashed with FileNotFoundError when it was absent.
os.makedirs(result_folder, exist_ok=True)

for folder_name in os.listdir(input_folder):
    folder_path = os.path.join(input_folder, folder_name)
    if not os.path.isdir(folder_path):
        continue

    # (site, year) -> (mean cf_80, mean cf_100, mean cf_150)
    averages = {}

    for file_name in os.listdir(folder_path):
        if not file_name.endswith('.csv'):
            continue
        input_file = os.path.join(folder_path, file_name)

        # File names look like "<prefix>_<site>_<year>.csv"; the prefix is
        # discarded. splitext strips only the trailing extension (the old
        # str.replace would have removed '.csv' anywhere in the token).
        # A name with a different number of underscores raises ValueError,
        # surfacing malformed input immediately — same as before.
        _, site, year = os.path.splitext(file_name)[0].split('_')

        frame = pd.read_csv(input_file, encoding='latin-1')
        averages[(site, year)] = (
            frame['cf_80'].mean(),
            frame['cf_100'].mean(),
            frame['cf_150'].mean(),
        )

    # Show the computed averages for this folder.
    print(averages)
    print("\n")

    # Read the pristine site table; distinct name avoids shadowing the
    # DataFrame variable (the original reused `data` for both).
    with open(site_table_file, 'r', encoding='latin-1') as file:
        rows = list(csv.reader(file))

    # New column headers for the averaged values.
    rows[0].extend(['cf_ave_80', 'cf_ave_100', 'cf_ave_150'])

    # Append the three averages to every data row with a single dict lookup
    # per row (the original looked up the same key three times). A missing
    # (site, year) key still raises KeyError, as before.
    for row in rows[1:]:
        row.extend(averages[(row[0], row[1])])

    output_file_path = os.path.join(result_folder, f"ave_{folder_name}.csv")

    # Write the augmented table for this model folder.
    with open(output_file_path, 'w', newline='', encoding='latin-1') as file:
        csv.writer(file).writerows(rows)

    print(f"Data processed and saved to {output_file_path}")

print("All data processed and saved successfully")