import xarray as xr
import pandas as pd
import numpy as np
from pathlib import Path
import re
import sys

from core import config
from core import utils
ROOT_DIR = Path(__file__).parent.parent.absolute()
sys.path.append(str(ROOT_DIR / 'core'))
import config
import utils
from pyNMME.module_calibration import BGG, QuantileMapping
from pyNMME import module_calibration


def extract_info_from_filename(filename):
    """Extract the init date, lead time, and month from a GFS forecast filename.

    Args:
        filename (str): GFS forecast filename, e.g. ``gfs.0p25.2025010100.f024.grib2``.

    Returns:
        tuple: (pd.Timestamp init time, lead time in hours (int), month (int)).

    Raises:
        ValueError: If the filename does not match the expected GFS naming pattern.
    """
    pattern = r'gfs\.0p25\.(\d{10})\.f(\d{3})\.grib2'
    match = re.match(pattern, filename)

    if not match:
        # Bug fix: include the offending filename in the message (it previously
        # printed a literal placeholder instead of the actual name).
        raise ValueError(f"文件名格式不正确: {filename}")

    date_str, lead_time_str = match.groups()

    # Parse the YYYYMMDDHH init-time string field by field.
    year = int(date_str[0:4])
    month = int(date_str[4:6])
    day = int(date_str[6:8])
    hour = int(date_str[8:10])

    date = pd.Timestamp(year=year, month=month, day=day, hour=hour)
    lead_time = int(lead_time_str)

    return date, lead_time, month

def get_param_file(param_path, month, lead_time):
    """Resolve the fitted-parameter file for a given month and lead time.

    Args:
        param_path (Path): Directory containing the fitted-parameter NetCDF files.
        month (int): Month (1-12).
        lead_time (int): Lead time in hours.

    Returns:
        Path: Path to the matching ``fitted_params_MonthMM_LeadLLL.nc`` file.

    Raises:
        FileNotFoundError: If the expected parameter file does not exist.
    """
    param_file = param_path / f'fitted_params_Month{month:02d}_Lead{lead_time:03d}.nc'
    if not param_file.exists():
        raise FileNotFoundError(f"找不到参数文件: {param_file}")
    return param_file

def apply_correction(da_fcst, ds_params, lat_idx, lon_idx):
    """Calibrate the forecast series at one grid cell using fitted BGG parameters.

    Args:
        da_fcst (xarray.DataArray): Forecast time series for the grid point.
        ds_params (xarray.Dataset): Dataset holding the fitted calibration parameters.
        lat_idx (int): Latitude index of the grid cell.
        lon_idx (int): Longitude index of the grid cell.

    Returns:
        np.ndarray: Calibrated ensemble of shape [time, member]; all-NaN when the
        cell's fitted parameters are invalid.
    """
    # Pull every parameter for this cell with a single point selection.
    cell = ds_params.isel(lat=lat_idx, lon=lon_idx)
    fcst_params = cell['para_fcst'].values
    obs_params = cell['para_obs'].values
    correlation = float(cell['corr'].values)
    fcst_dist = str(cell['dist_type_fcst'].values)
    obs_dist = str(cell['dist_type_obs'].values)

    n_times = len(da_fcst)
    calibrated = np.full([n_times, module_calibration.n_number], np.nan)

    # A NaN anywhere in the fitted parameters means the cell was never fitted:
    # leave the output as all-NaN.
    if np.isnan(fcst_params).any() or np.isnan(obs_params).any() or np.isnan(correlation):
        return calibrated

    # Build the BGG calibration model for this cell.
    bgg = module_calibration.BGG.init_from_array(
        para_fcst_array=fcst_params,
        para_obs_array=obs_params,
        corr=correlation,
        dist_type=(fcst_dist, obs_dist),
    )

    # One fixed-seed generator shared across time steps keeps draws reproducible.
    rng = np.random.default_rng(config.fix_rnd_seed)

    for step in range(n_times):
        calibrated[step, :] = bgg.calibrate(
            da_fcst.isel(time=step).values,
            cens_value=config.cens_thres[0],
            rnd_seed=rng,
        )

    return calibrated

def delete_reftime(ds):
    """Normalize a GFS dataset: keep only time/lat/lon coords and reset the time axis.

    Drops every auxiliary coordinate (e.g. reference time) and replaces the time
    coordinate with the init date parsed from the source filename. Only the date
    portion (YYYYMMDD) is used; the hour captured by the pattern is discarded.

    Args:
        ds (xarray.Dataset): Dataset whose ``encoding['source']`` holds the
            original filename containing a ``.YYYYMMDDHH.f`` token.

    Returns:
        xarray.Dataset: Dataset with only time/lat/lon coords and a single-date
        time axis.

    Raises:
        ValueError: If the source filename does not contain the expected token
            (previously this surfaced as a cryptic AttributeError on ``.group``).
    """
    use_coord = ['time', 'lat', 'lon']
    pattern = r'\.(\d{4})(\d{2})(\d{2})(\d{2})\.f'

    # drop_vars returns a new dataset, so iterating the original coords is safe.
    for coord in ds.coords:
        if coord not in use_coord:
            ds = ds.drop_vars(coord, errors='ignore')

    source = ds.encoding['source']
    match = re.search(pattern, source)
    if match is None:
        raise ValueError(f"无法从文件名解析日期: {source}")

    date_index = pd.to_datetime(match.group(1) + match.group(2) + match.group(3), format='%Y%m%d')
    ds = ds.assign_coords(time=pd.DatetimeIndex([date_index]))

    return ds

def main():
    """Calibrate every GFS forecast file in the input directory.

    For each ``gfs.0p25.*.grib2.nc`` file: parse date/lead time from the name,
    load the matching fitted-parameter file, apply BGG calibration at each masked
    grid point, and write the calibrated ensemble to NetCDF. Per-file errors are
    printed and skipped so one bad file does not abort the whole batch.
    """
    try:
        # Load the basin mask; np.where yields the (lat, lon) index pairs of masked cells.
        da_mask = xr.open_dataarray('/app/reference/mask_pearl_river_new.nc')
        lat, lon = np.where(da_mask)
        
        # Input forecasts, fitted-parameter directory, and output root.
        gfs_real_path = Path(r"/app/data/minio/2021_3_standard/24")
        param_path = Path(r'/app/data/2021_3_params')
        save_file = Path(r'/app/data/2021_3_calibrated')
        
        # Collect all GFS forecast files to process.
        gfs_files = list(gfs_real_path.glob('gfs.0p25.*.grib2.nc'))
        
        for lidx, gfs_file in enumerate(gfs_files):
            start_time = pd.Timestamp.now()
            try:
                # Parse init date, lead time, and month from the filename.
                date, lead_time, month = extract_info_from_filename(gfs_file.name)
                print(f"处理文件: {gfs_file.name}, 日期: {date}, 预见期: {lead_time}小时, 月份: {month}")
                
                # Build the per-lead-time output directory and filename.
                save_path = utils.make_path([f'{lead_time}'], parent_dir=save_file)
                date_str = date.strftime('%Y%m%d')
                hour_str = date.strftime('%H')
                output_filename = f'gfs.cal.0p25.{date_str}{hour_str}.f{lead_time:03d}.grib2.nc'
                output_file = save_path.joinpath(output_filename)
                
                # Locate the fitted-parameter file for this month/lead time.
                param_file = get_param_file(param_path, month, lead_time)
                
                # Load the parameters fully into memory before the file is closed.
                with xr.open_dataset(param_file) as ds_params:
                    params = ds_params.load()
                
                # Read the GFS forecast data.
                # NOTE(review): this uses utils.delete_reftime, not the module-level
                # delete_reftime defined above — confirm which one is intended.
                with xr.open_mfdataset(gfs_file, preprocess=utils.delete_reftime) as ds_gfs:
                    da_fcst = ds_gfs[config.gfs_var_nm]
                    # NOTE(review): the day offset is the file's enumeration index
                    # (lidx), which depends on glob ordering — confirm this matches
                    # the intended target-time alignment.
                    target_time = da_fcst.time + pd.to_timedelta(lidx, unit='d')
                    da_fcst = da_fcst.assign_coords(target_time=target_time).compute()
                
                # Keep only mask points that fall inside this file's grid extent.
                valid_points = []
                for i in range(len(lat)):
                    if lat[i] < len(da_fcst.lat) and lon[i] < len(da_fcst.lon):
                        valid_points.append(i)
                    else:
                        continue
                
                if not valid_points:
                    # No grid point can be calibrated: copy the original data
                    # through to the output location instead.
                    with xr.open_dataset(gfs_file) as ds:
                        # Attach the target_time coordinate computed above.
                        ds = ds.assign_coords(target_time=target_time)
                        # Write out with the project's standard encodings.
                        ds.to_netcdf(
                            output_file,
                            encoding=utils.encodings([config.gfs_var_nm])
                        )
                    print(f"原始文件已复制到: {output_file}")
                    continue
                
                # Allocate the output array [time, member, lat, lon], NaN-filled.
                calibrated_data = np.full(
                    (len(da_fcst.time), module_calibration.n_number, len(da_fcst.lat), len(da_fcst.lon)),
                    np.nan
                )
                
                # Calibrate each valid masked grid point independently; failures
                # at one point leave its cells NaN and do not stop the file.
                for idx in valid_points:
                    lat_idx, lon_idx = lat[idx], lon[idx]
                    try:
                        da_fcst_point = da_fcst.isel(lat=lat_idx, lon=lon_idx)
                        calibrated_point = apply_correction(da_fcst_point, params, lat_idx, lon_idx)
                        calibrated_data[:, :, lat_idx, lon_idx] = calibrated_point
                    except Exception as e:
                        print(f"处理网格点 (lat={lat_idx}, lon={lon_idx}) 时出错: {str(e)}")
                        continue
                
                # Assemble the calibrated dataset with an ensemble-member axis.
                ds_calibrate = xr.DataArray(
                    calibrated_data,
                    dims=['time', 'ens_member', 'lat', 'lon'],
                    coords={
                        'time': da_fcst.time,
                        'ens_member': np.arange(module_calibration.n_number),
                        'lat': da_fcst.lat,
                        'lon': da_fcst.lon
                    },
                ).to_dataset(name=config.gfs_var_nm)
                ds_calibrate['target_time'] = da_fcst.target_time
                
                # Write the calibrated result to NetCDF.
                ds_calibrate.to_netcdf(
                    output_file,
                    encoding=utils.encodings([config.gfs_var_nm])
                )
                ds_calibrate.close()
                
                print(f"校正结果已保存至: {output_file}")
                end_time = pd.Timestamp.now()
                process_time = (end_time - start_time).total_seconds()
                print(f"处理用时: {process_time:.2f} 秒")
                
            except Exception as e:
                # Per-file guard: report and move on to the next file.
                print(f"处理文件 {gfs_file.name} 时出错: {str(e)}")
                continue
                
    except Exception as e:
        # Fatal setup error (mask/paths): report and re-raise.
        print(f"程序执行出错: {str(e)}")
        raise


# Run the batch calibration only when executed as a script.
if __name__ == '__main__':
    main()
