"""
Author: zhuanglaihong
Date: 2025-04-03 14:27:40
LastEditTime: 2025-04-08 20:49:35
LastEditors: zhuanglaihong
FilePath: /zlh/rainfall/src/main.py
Copyright: Copyright (c) 2021-2024 zhuanglaihong. All rights reserved.
"""
from pathlib import Path
import os
import glob
import xarray as xr
import numpy as np
import pandas as pd
import calendar
import shutil
from core.standard_gfs import process_directory
from core.correct_gfs import (
    extract_info_from_filename,
    get_param_file,
    apply_correction,
    delete_reftime,
)

from core.pyNMME import module_calibration
from core.config import *
from path_config import *
from loguru import logger

# Project root: two directory levels above this file (src/main.py -> rainfall/).
ROOT_DIR = Path(__file__).parent.parent.absolute()


def reform_gfs_files(gfs_folder: str, reference_gfs_file: str) -> None:
    """Reorganize GFS files from a date tree into per-lead-time folders.

    Files named ``gfs.tHHz.pgrb2.0p25.fXXX`` under ``gfs_folder`` are copied
    to ``<reference_gfs_file>/<lead>/gfs.0p25.YYYYMMDDHH.fXXX.grib2`` for the
    target lead times (24/48/72/96/120 h). Existing destination files are
    skipped so the function is safe to re-run.

    Args:
        gfs_folder: Source directory laid out as
            GFS/1h/YYYY/MM/DD/HH/gfs.tHHz.pgrb2.0p25.fXXX.
        reference_gfs_file: Output folder receiving the reorganized files
            (despite the name, this is a directory path, not a file).
    """
    try:
        logger.info(f"开始重组织GFS文件: {gfs_folder}")

        gfs_path = Path(gfs_folder)
        total_files = 0
        gfs_files = 0
        subdirs = set()

        # First pass: count files and first-level subdirectories for logging.
        for file_path in gfs_path.rglob("*"):
            if file_path.is_file():
                total_files += 1
                if "pgrb2.0p25.f" in file_path.name:
                    gfs_files += 1
            elif file_path.is_dir():
                rel_path = file_path.relative_to(gfs_path)
                if len(rel_path.parts) == 1:
                    subdirs.add(rel_path.parts[0])

        logger.info(f"文件夹 {gfs_folder} 下共有 {len(subdirs)} 个子文件夹")
        logger.info(f"总共有 {total_files} 个文件，其中 {gfs_files} 个 GFS 预报文件")

        # Lead times (hours) to keep.
        target_leads = [24, 48, 72, 96, 120]

        os.makedirs(reference_gfs_file, exist_ok=True)

        # Second pass: copy matching forecast files into <output>/<lead>/.
        for file_path in gfs_path.rglob("*"):
            if not (file_path.is_file() and "pgrb2.0p25.f" in file_path.name):
                continue

            # Date comes from the directory layout .../YYYY/MM/DD/HH/<file>;
            # assumes the documented tree depth -- TODO confirm for other layouts.
            path_parts = file_path.parts
            year = path_parts[-5]
            month = path_parts[-4]
            day = path_parts[-3]
            # Cycle hour from the file name (gfs.tHHz.pgrb2.0p25.fXXX).
            hour = file_path.name.split(".t")[-1][:2]

            # Lead time is the 3-digit suffix after "pgrb2.0p25.f".
            lead_str = file_path.name.split("pgrb2.0p25.f")[-1][:3]
            try:
                lead = int(lead_str)
            except ValueError:
                continue
            if lead not in target_leads:
                continue

            dst_dir = Path(reference_gfs_file) / str(lead)
            dst_dir.mkdir(parents=True, exist_ok=True)

            # New name format: gfs.0p25.YYYYMMDDHH.fXXX.grib2
            formatted_date = f"{year}{str(month).zfill(2)}{str(day).zfill(2)}{hour}"
            dst_file = dst_dir / f"gfs.0p25.{formatted_date}.f{lead:03d}.grib2"

            # Skip files already copied in a previous run.
            if dst_file.exists():
                logger.info(f"文件已存在，跳过复制: {dst_file}")
                continue

            try:
                # shutil is imported at module level; the redundant in-loop
                # import was removed.
                shutil.copy2(file_path, dst_file)
                logger.info(f"复制文件: {file_path} -> {dst_file}")
            except Exception as e:
                logger.error(f"处理文件 {file_path} 时出错: {e}")

        logger.info("GFS文件重组织完成")

    except Exception as e:
        logger.error(f"GFS文件重组织出错: {e}")
        raise


def standardize_gfs_data(input_dir: str, output_dir: str, reference_file: str) -> None:
    """Standardize GFS data, one lead-time subfolder at a time.

    Mirrors each per-lead subdirectory of ``input_dir`` under ``output_dir``
    and delegates the actual conversion to ``process_directory``; aggregated
    success/failure counts are logged at the end.
    """
    try:
        logger.info(f"开始标准化 GFS 数据: {input_dir}")
        ok_total = 0
        fail_total = 0
        failed_all = []

        # Walk the per-lead subfolders of the input directory.
        for entry in os.listdir(input_dir):
            src_lead = os.path.join(input_dir, entry)
            if not os.path.isdir(src_lead):
                continue

            # Mirror the lead folder on the output side.
            dst_lead = os.path.join(output_dir, entry)
            os.makedirs(dst_lead, exist_ok=True)

            # Delegate the per-folder work and accumulate the tallies.
            ok, failed, failed_names = process_directory(
                src_lead, reference_file, dst_lead
            )
            ok_total += ok
            fail_total += failed
            failed_all += failed_names

        logger.info(f"GFS 数据标准化完成")
        logger.info(f"总成功处理: {ok_total} 个文件")
        if fail_total > 0:
            logger.warning(f"总处理失败: {fail_total} 个文件")
            for file in failed_all:
                logger.warning(f"  - {file}")
    except Exception as e:
        logger.error(f"GFS 数据标准化出错: {e}")
        raise

def save_params_data(source_folder, target_folder) -> None:
    """Copy parameter ``.nc`` files from ``source_folder`` to ``target_folder``.

    Note: despite the log wording ("移动"/"move"), files are copied with
    ``shutil.copy2`` and the originals are left in place. Files that already
    exist at the destination are skipped.

    Args:
        source_folder: Directory containing the source ``.nc`` parameter files.
        target_folder: Destination directory (created if missing).
    """
    try:
        # Make sure the destination directory exists.
        os.makedirs(target_folder, exist_ok=True)

        logger.info(f"开始移动参数数据文件")
        logger.info(f"源文件夹: {source_folder}")
        logger.info(f"目标文件夹: {target_folder}")

        # Collect all .nc files in the source folder.
        mswep_files = glob.glob(os.path.join(source_folder, "*.nc"))

        if not mswep_files:
            logger.warning(f"在源文件夹中未找到nc文件")
            return

        success_count = 0
        failed_files = []

        for source_file in mswep_files:
            # Bind file_name before the try so the except handler can always
            # reference it (previously a failure before the assignment would
            # have raised NameError inside the handler).
            file_name = os.path.basename(source_file)
            try:
                target_file = os.path.join(target_folder, file_name)

                # Skip files already present at the destination.
                if os.path.exists(target_file):
                    logger.info(f"文件已存在，跳过复制: {target_file}")
                    continue

                # shutil is imported at module level; redundant local import removed.
                shutil.copy2(source_file, target_file)
                success_count += 1
                logger.info(f"成功移动文件: {file_name}")

            except Exception as e:
                logger.error(f"移动文件 {file_name} 时出错: {e}")
                failed_files.append(file_name)
                continue

        # Summary statistics.
        logger.info(f"文件移动完成")
        logger.info(f"成功移动: {success_count} 个文件")

        if failed_files:
            logger.warning(f"移动失败: {len(failed_files)} 个文件")
            for file in failed_files:
                logger.warning(f"  - {file}")

    except Exception as e:
        logger.error(f"移动参数数据文件时出错: {e}")
        raise

def correct_gfs_forecast(
    standard_gfs_folder: str, params_folder: str, output_folder: str
) -> None:
    """Bias-correct GFS precipitation forecasts using per-month parameter files.

    For every lead time in the module-level ``leads`` (star-imported from
    ``core.config``), each ``gfs.0p25.*.grib2.nc`` file under
    ``<standard_gfs_folder>/<lead>/`` is calibrated grid-point by grid-point
    inside the Pearl River mask and written to
    ``<output_folder>/<lead>/gfs.cal.0p25.<YYYYMMDDHH>.f<LLL>.grib2.nc``.
    Existing outputs are skipped; per-file failures are logged and processing
    continues with the next file.

    Args:
        standard_gfs_folder: Root of standardized GFS files, one subdirectory
            per lead time.
        params_folder: Directory holding calibration parameter files, located
            per month/lead via ``get_param_file``.
        output_folder: Root directory for corrected output files.
    """
    try:
        os.makedirs(output_folder, exist_ok=True)

        # Iterate each lead-time directory.
        for lead in leads:
            gfs_path = Path(standard_gfs_folder) / str(lead)
            gfs_files = list(gfs_path.glob("gfs.0p25.*.grib2.nc"))

            for lidx, gfs_file in enumerate(gfs_files):
                start_time = pd.Timestamp.now()
                try:
                    # Parse init date, lead hours and month from the file name.
                    date, lead_time, month = extract_info_from_filename(gfs_file.name)

                    # Build the output path and file name.
                    save_path = Path(output_folder) / str(lead)
                    date_str = date.strftime("%Y%m%d")
                    hour_str = date.strftime("%H")
                    output_filename = (
                        f"gfs.cal.0p25.{date_str}{hour_str}.f{lead_time:03d}.grib2.nc"
                    )
                    output_file = save_path / output_filename

                    # Skip work already completed in a previous run.
                    if output_file.exists():
                        logger.info(f"文件已存在，跳过处理: {output_file}")
                        continue

                    logger.info(
                        f"处理文件: {gfs_file.name}, 日期: {date}, 预见期: {lead_time}小时"
                    )

                    # Locate the parameter file for this month and lead time.
                    param_file = get_param_file(Path(params_folder), month, lead_time)

                    # Load the calibration parameters fully into memory.
                    with xr.open_dataset(param_file) as ds_params:
                        params = ds_params.load()

                    # Load the forecast (reference time stripped by delete_reftime)
                    # and attach a target_time coordinate.
                    with xr.open_mfdataset(
                        gfs_file, preprocess=delete_reftime
                    ) as ds_gfs:
                        da_fcst = ds_gfs["A_PCP_L1_Accum_1"]
                        target_time = da_fcst.time + pd.to_timedelta(
                            lidx, unit="d"
                        )  # NOTE(review): lidx is the file's position in the glob
                        # listing, used here as a day offset ("forecast time +
                        # lead days") -- confirm this matches the intended
                        # semantics, since glob order is not guaranteed sorted.
                        da_fcst = da_fcst.assign_coords(
                            target_time=target_time
                        ).compute()  # add target-time coordinate and materialize

                    # Output buffer (time, ensemble member, lat, lon); NaN
                    # everywhere outside the mask.
                    calibrated_data = np.full(
                        (
                            len(da_fcst.time),
                            module_calibration.n_number,
                            len(da_fcst.lat),
                            len(da_fcst.lon),
                        ),
                        np.nan,
                    )

                    # Calibrate only grid points inside the Pearl River mask.
                    # NOTE(review): the mask file is re-opened for every input
                    # file; hoisting it out of the loop would avoid repeated I/O.
                    mask_file = os.path.join(
                        ROOT_DIR, "reference/mask_pearl_river_new.nc"
                    )
                    da_mask = xr.open_dataarray(mask_file)
                    lat, lon = np.where(da_mask)
                    for gidx in range(len(lat)):
                        lat_idx, lon_idx = lat[gidx], lon[gidx]
                        da_fcst_point = da_fcst.isel(lat=lat_idx, lon=lon_idx)
                        calibrated_point = apply_correction(
                            da_fcst_point, params, lat_idx, lon_idx
                        )
                        calibrated_data[:, :, lat_idx, lon_idx] = calibrated_point

                    # Persist the calibration result.
                    os.makedirs(save_path, exist_ok=True)

                    # Assemble the output dataset and write it zlib-compressed.
                    ds_calibrate = xr.DataArray(
                        calibrated_data,
                        dims=["time", "ens_member", "lat", "lon"],
                        coords={
                            "time": da_fcst.time,
                            "ens_member": np.arange(module_calibration.n_number),
                            "lat": da_fcst.lat,
                            "lon": da_fcst.lon,
                        },
                    ).to_dataset(name="A_PCP_L1_Accum_1")
                    ds_calibrate["target_time"] = da_fcst.target_time

                    ds_calibrate.to_netcdf(
                        output_file,
                        encoding=dict.fromkeys(["A_PCP_L1_Accum_1"], dict(zlib=True)),
                    )
                    ds_calibrate.close()
                    logger.info(f"已保存校正结果: {output_file}")
                    end_time = pd.Timestamp.now()
                    process_time = (end_time - start_time).total_seconds()
                    logger.info(f"处理用时: {process_time:.2f} 秒")
                except Exception as e:
                    # One bad file must not abort the whole batch.
                    logger.error(f"处理文件 {gfs_file.name} 时出错: {e}")
                    continue

        logger.info("GFS数据订正完成")

    except Exception as e:
        logger.error(f"GFS数据订正出错: {e}")
        raise


def save_corrected_gfs(corrected_folder: str, minio_folder: str):
    """Copy corrected GFS files into a MinIO-mounted YYYY/MM/DD/HH tree.

    Args:
        corrected_folder: Root of corrected files, one subdirectory per lead
            time, each containing files like
            ``gfs.cal.0p25.<YYYYMMDDHH>.f<LLL>.grib2.nc``.
        minio_folder: Destination root; each file lands under
            ``<minio_folder>/<YYYY>/<MM>/<DD>/<HH>/``.
    """
    try:
        logger.info(f"开始移动校正后的GFS数据: {corrected_folder}")

        # Walk the per-lead subdirectories of the corrected folder.
        for lead_dir in os.listdir(corrected_folder):
            lead_path = os.path.join(corrected_folder, lead_dir)
            if os.path.isdir(lead_path):
                for filename in os.listdir(lead_path):
                    if filename.endswith(".nc"):
                        try:
                            # File names look like
                            # gfs.cal.0p25.2021030106.f024.grib2.nc;
                            # the 4th dot-separated field is YYYYMMDDHH.
                            parts = filename.split(".")
                            datetime_str = parts[3]  # e.g. 2021030106

                            year = datetime_str[:4]
                            month = datetime_str[4:6]
                            day = datetime_str[6:8]
                            hour = datetime_str[8:10]

                            # Build the dated destination directory.
                            target_dir = os.path.join(
                                minio_folder, year, month, day, hour
                            )
                            os.makedirs(target_dir, exist_ok=True)

                            source_file = os.path.join(lead_path, filename)
                            target_file = os.path.join(target_dir, filename)

                            # Remove any stale copy so copy2 replaces it. #TODO
                            if os.path.exists(target_file):
                                os.remove(target_file)

                            # shutil is imported at module level; redundant
                            # local import removed.
                            shutil.copy2(source_file, target_file)
                            # Bug fix: these log messages previously printed the
                            # literal "(unknown)" instead of the file path.
                            logger.info(f"成功移动文件: {source_file} -> {target_file}")

                        except Exception as e:
                            logger.error(f"处理文件 {filename} 时出错: {e}")
                            continue

        logger.info("所有校正后的文件移动完成")

    except Exception as e:
        logger.error(f"移动校正后的文件失败: {e}")
        import traceback

        logger.error(traceback.format_exc())
        raise

def generate_month_combinations(start_date: str, end_date: str) -> list:
    """Generate all (year, month) pairs in the given inclusive month range.

    Args:
        start_date: Start month in 'YYYY-MM' format.
        end_date: End month in 'YYYY-MM' format (inclusive).

    Returns:
        list: (year, month) tuples in chronological order; empty when
        ``end_date`` precedes ``start_date``.

    Raises:
        ValueError: If either string does not match the 'YYYY-MM' format.
    """
    from datetime import datetime

    # Parse the boundary months.
    start = datetime.strptime(start_date, "%Y-%m")
    end = datetime.strptime(end_date, "%Y-%m")

    # Work in absolute month counts (year*12 + zero-based month) so plain
    # integer arithmetic replaces the former third-party
    # dateutil.relativedelta dependency.
    first = start.year * 12 + (start.month - 1)
    last = end.year * 12 + (end.month - 1)
    return [(m // 12, m % 12 + 1) for m in range(first, last + 1)]


def process_main(start_date: str, end_date: str):
    """Run the full pipeline (reform -> standardize -> correct -> upload) for
    every month in the inclusive range.

    A failure in one month is logged and the loop continues with the next
    month. Temporary working folders are removed after a successful month.

    Args:
        start_date: First month to process, 'YYYY-MM'.
        end_date: Last month to process, 'YYYY-MM'.
    """
    # Expand the range into (year, month) pairs.
    date_combinations = generate_month_combinations(start_date, end_date)

    for year, monidx in date_combinations:
        logger.info(f"开始处理 {year}年{monidx:02d}月 的数据")

        # Absolute MinIO mount points. (These were previously joined with
        # ROOT_DIR, but os.path.join discards every component before an
        # absolute path, so the result was always the bare absolute path.)
        gfs_folder = f"/app/minio/gfs-origin/GFS/CHN-FORECASAT/1h/{year}/{monidx:02d}"
        source_folder = "/app/minio/rainfall/A_OA_data/GFS/params"  # parameter source
        minio_folder = "/app/minio/rainfall/A_OA_data/GFS/1h_revise"

        # Project-local working directories under the repository root.
        reform_gfs_folder = os.path.join(ROOT_DIR, f"data/minio/{year}_{monidx:02d}_gfs_grb2")
        standard_gfs_folder = os.path.join(ROOT_DIR, f"data/minio/{year}_{monidx:02d}_standard")
        reference_nc_file = os.path.join(ROOT_DIR, "reference/", "gfs.0p25.2021010100.f024.grib2.nc")
        params_folder = os.path.join(ROOT_DIR, "data/params")  # parameter destination
        corrected_gfs_folder = os.path.join(ROOT_DIR, f"data/{year}_{monidx:02d}_calibrated")

        try:
            logger.info("开始标准化GFS数据")

            # Process every day of the month that actually has data.
            days_in_month = calendar.monthrange(year, monidx)[1]
            for day in range(1, days_in_month + 1):
                day_folder = os.path.join(gfs_folder, str(day).zfill(2))
                if os.path.exists(day_folder):
                    reform_gfs_files(day_folder, reform_gfs_folder)
                else:
                    logger.warning(f"{year}年{monidx}月{day}日的数据文件夹不存在: {day_folder}")

            standardize_gfs_data(reform_gfs_folder, standard_gfs_folder, reference_nc_file)

            logger.info("基于参数文件对GFS数据订正")
            save_params_data(source_folder, params_folder)
            correct_gfs_forecast(standard_gfs_folder, params_folder, corrected_gfs_folder)

            logger.info("上传GFS数据至MinIO")
            save_corrected_gfs(corrected_gfs_folder, minio_folder)
            logger.info("删除临时文件")
            shutil.rmtree(reform_gfs_folder)
            shutil.rmtree(standard_gfs_folder)

        except Exception as e:
            # Keep processing the remaining months on failure.
            logger.error(f"处理 {year}年{monidx}月 数据时出错: {e}")
            continue

        logger.info(f"完成处理 {year}年{monidx}月 的数据")


def main():
    """Entry point: run the correction pipeline for a fixed month window."""
    # Inclusive correction window, both endpoints in 'YYYY-MM' format.
    window = ("2025-05", "2025-05")

    try:
        process_main(*window)
    except Exception as e:
        logger.error(f"程序执行出错: {e}")


if __name__ == "__main__":
    main()
