import os
import numpy as np
import pandas as pd
import xarray as xr
from pathlib import Path
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from tqdm import tqdm
import logging

# Configuration: input CSV of merged CloudSat records and output NetCDF path.
INPUT_CSV = "/mnt/datastore/liudddata/cloudsat_data/merged_201902_202004_cloudsat.csv"
OUTPUT_NC = "/mnt/datastore/liudddata/cloudsat_data/cloudsat_annual_mean_1deg_global.nc"

# Logging: mirror every message to a log file and to the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('cloudsat_annual_processing.log'),
        logging.StreamHandler()
    ]
)


def load_and_preprocess_data(csv_path=None):
    """Load the merged CloudSat CSV and apply basic quality control.

    Parameters
    ----------
    csv_path : str, optional
        Path to the input CSV. Defaults to the module-level ``INPUT_CSV``.
        Resolved at call time (not in the signature) so the module imports
        cleanly regardless of whether the default file exists.

    Returns
    -------
    pandas.DataFrame
        Records with a parsed ``time`` column and physically plausible
        ``cloudsat_cbh`` / ``longitude`` / ``latitude`` values.
    """
    if csv_path is None:
        csv_path = INPUT_CSV

    logging.info("开始加载CSV数据...")

    df = pd.read_csv(csv_path)
    logging.info(f"原始数据量: {len(df):,} 条记录")

    # Parse the timestamp column once up front.
    df['time'] = pd.to_datetime(df['time'])

    # Quality control: drop missing values and implausible records.
    initial_count = len(df)
    df = df.dropna(subset=['cloudsat_cbh', 'longitude', 'latitude'])
    # Cloud base height: 0-20 km (meters) is the accepted plausible range.
    df = df[(df['cloudsat_cbh'] >= 0) & (df['cloudsat_cbh'] <= 20000)]
    df = df[(df['longitude'] >= -180) & (df['longitude'] <= 180)]
    df = df[(df['latitude'] >= -90) & (df['latitude'] <= 90)]

    filtered_count = len(df)
    logging.info(f"质量控制后数据量: {filtered_count:,} 条记录")
    logging.info(f"过滤掉 {initial_count - filtered_count:,} 条无效记录")

    return df


def create_global_grid(bin_size=1.0):
    """Build a regular global latitude/longitude grid.

    Parameters
    ----------
    bin_size : float, optional
        Cell size in degrees; must divide 360 (and 180) evenly.
        Default 1.0 (a 1-degree global grid).

    Returns
    -------
    dict
        ``lon_edges`` / ``lat_edges`` — bin boundaries,
        ``lon_centers`` / ``lat_centers`` — cell midpoints,
        ``shape`` — (n_lat, n_lon).
    """
    # Longitude spans -180..180, latitude -90..90. Use linspace rather than
    # arange so the final edge lands exactly on 180/90 even for fractional
    # bin sizes such as 0.1 (arange accumulates floating-point error).
    n_lon = int(round(360.0 / bin_size))
    n_lat = int(round(180.0 / bin_size))
    lon_edges = np.linspace(-180.0, 180.0, n_lon + 1)
    lat_edges = np.linspace(-90.0, 90.0, n_lat + 1)

    # Cell centers are the midpoints between consecutive edges.
    lon_centers = (lon_edges[:-1] + lon_edges[1:]) / 2
    lat_centers = (lat_edges[:-1] + lat_edges[1:]) / 2

    return {
        'lon_edges': lon_edges,
        'lat_edges': lat_edges,
        'lon_centers': lon_centers,
        'lat_centers': lat_centers,
        'shape': (len(lat_centers), len(lon_centers))
    }


def calculate_annual_mean(df, grid_info, min_samples=5):
    """Bin observations onto the grid and compute per-cell CBH statistics.

    Averages ``cloudsat_cbh`` over the Feb 2019 - Apr 2020 study period
    (a 15-month window treated as one "annual" mean) on the grid described
    by ``grid_info``.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain ``time`` (datetime64), ``longitude``, ``latitude``
        and ``cloudsat_cbh`` columns.
    grid_info : dict
        Output of ``create_global_grid``.
    min_samples : int, optional
        Cells with fewer observations report NaN mean/std (the raw count
        is still recorded). Default 5.

    Returns
    -------
    tuple of numpy.ndarray
        ``(cbh_mean, cbh_std, sample_count)``, each of ``grid_info['shape']``.
    """
    logging.info("开始计算年平均...")

    # Study period: Feb 2019 through Apr 2020.
    mask = ((df['time'].dt.year == 2019) & (df['time'].dt.month >= 2)) | \
           ((df['time'].dt.year == 2020) & (df['time'].dt.month <= 4))

    annual_df = df[mask].copy()
    logging.info(f"用于年平均计算的数据量: {len(annual_df):,} 条记录")

    lon_edges = grid_info['lon_edges']
    lat_edges = grid_info['lat_edges']
    n_lat, n_lon = grid_info['shape']

    # Grid-cell index for every observation.
    annual_df['lon_idx'] = np.digitize(annual_df['longitude'], lon_edges) - 1
    annual_df['lat_idx'] = np.digitize(annual_df['latitude'], lat_edges) - 1

    # digitize places points exactly on the upper boundary (lon == 180,
    # lat == 90) past the last bin; fold them back into the final cell
    # instead of silently discarding them.
    annual_df.loc[annual_df['longitude'] == lon_edges[-1], 'lon_idx'] = n_lon - 1
    annual_df.loc[annual_df['latitude'] == lat_edges[-1], 'lat_idx'] = n_lat - 1

    # Drop anything still outside the grid.
    valid_idx = (annual_df['lon_idx'] >= 0) & (annual_df['lon_idx'] < n_lon) & \
                (annual_df['lat_idx'] >= 0) & (annual_df['lat_idx'] < n_lat)
    annual_df = annual_df[valid_idx]

    # Vectorized per-cell statistics (replaces the per-group Python loop;
    # pandas' std uses ddof=1, matching Series.std in the original code).
    logging.info("计算网格统计量...")
    stats = annual_df.groupby(['lat_idx', 'lon_idx'])['cloudsat_cbh'] \
                     .agg(['mean', 'std', 'count'])

    cbh_mean = np.full(grid_info['shape'], np.nan)
    cbh_std = np.full(grid_info['shape'], np.nan)
    sample_count = np.zeros(grid_info['shape'])

    lat_i = stats.index.get_level_values('lat_idx')
    lon_i = stats.index.get_level_values('lon_idx')
    cbh_mean[lat_i, lon_i] = stats['mean'].to_numpy()
    cbh_std[lat_i, lon_i] = stats['std'].to_numpy()
    sample_count[lat_i, lon_i] = stats['count'].to_numpy()

    # Suppress under-sampled cells; counts stay recorded for diagnostics.
    cbh_mean[sample_count < min_samples] = np.nan
    cbh_std[sample_count < min_samples] = np.nan

    logging.info(f"有效网格数: {np.sum(~np.isnan(cbh_mean)):,} / {cbh_mean.size:,}")
    logging.info(f"平均每个有效网格样本数: {np.mean(sample_count[sample_count >= min_samples]):.1f}")

    return cbh_mean, cbh_std, sample_count


def save_to_netcdf(cbh_mean, cbh_std, sample_count, grid_info, output_path=None):
    """Write the gridded annual-mean statistics to a NetCDF file.

    Parameters
    ----------
    cbh_mean, cbh_std, sample_count : numpy.ndarray
        2-D (lat, lon) arrays produced by ``calculate_annual_mean``.
    grid_info : dict
        Output of ``create_global_grid`` (supplies the coordinate centers).
    output_path : str, optional
        Destination file. Defaults to the module-level ``OUTPUT_NC``.

    Returns
    -------
    xarray.Dataset
        The dataset that was written to disk.
    """
    if output_path is None:
        output_path = OUTPUT_NC

    logging.info("保存结果到NetCDF...")

    ds = xr.Dataset(
        {
            "cloud_base_height": (("lat", "lon"), cbh_mean),
            "cloud_base_height_std": (("lat", "lon"), cbh_std),
            "sample_count": (("lat", "lon"), sample_count)
        },
        coords={
            "lon": grid_info['lon_centers'],
            "lat": grid_info['lat_centers']
        },
        attrs={
            "title": "CloudSat年平均云底高度（1°网格）",
            "description": "基于2019年2月-2020年4月CloudSat数据计算的全球年平均云底高度",
            "time_period": "2019-02-01 to 2020-04-30",
            "grid_resolution": "1 degree",
            "units": "meters",
            "min_samples_per_grid": 5,
            "data_source": "CloudSat 2B-CLDCLASS",
            "processing_method": "网格分箱平均",
            "created_by": "CloudSat Processing Script"
        }
    )

    ds.to_netcdf(output_path)
    logging.info(f"结果已保存至: {output_path}")

    return ds


def create_quicklook_plot(ds, output_dir="."):
    """Render a 4-panel quicklook figure and log summary statistics.

    Panels: annual-mean CBH, CBH standard deviation, log10 sample count,
    and a binary data-coverage map. The figure is saved as PNG under
    ``output_dir`` and closed afterwards to release its memory.

    Parameters
    ----------
    ds : xarray.Dataset
        Output of ``save_to_netcdf`` (variables ``cloud_base_height``,
        ``cloud_base_height_std``, ``sample_count``).
    output_dir : str, optional
        Directory for the PNG output. Default: current directory.
    """
    logging.info("创建可视化图...")

    fig, axes = plt.subplots(2, 2, figsize=(16, 12),
                             subplot_kw={'projection': ccrs.PlateCarree()})

    # Panel 1: annual-mean cloud base height.
    im1 = axes[0, 0].pcolormesh(ds.lon, ds.lat, ds.cloud_base_height,
                                cmap='viridis', vmin=0, vmax=10000)
    axes[0, 0].coastlines()
    axes[0, 0].add_feature(cfeature.BORDERS, linestyle=':')
    axes[0, 0].set_title('年平均云底高度 (m)')
    plt.colorbar(im1, ax=axes[0, 0], orientation='horizontal', pad=0.05)

    # Panel 2: per-cell standard deviation.
    im2 = axes[0, 1].pcolormesh(ds.lon, ds.lat, ds.cloud_base_height_std,
                                cmap='plasma', vmin=0, vmax=5000)
    axes[0, 1].coastlines()
    axes[0, 1].add_feature(cfeature.BORDERS, linestyle=':')
    axes[0, 1].set_title('云底高度标准差 (m)')
    plt.colorbar(im2, ax=axes[0, 1], orientation='horizontal', pad=0.05)

    # Panel 3: sample count on a log10 scale (zero-count cells become NaN).
    sample_log = np.log10(np.where(ds.sample_count > 0, ds.sample_count, np.nan))
    im3 = axes[1, 0].pcolormesh(ds.lon, ds.lat, sample_log,
                                cmap='YlOrBr', vmin=0, vmax=4)
    axes[1, 0].coastlines()
    axes[1, 0].add_feature(cfeature.BORDERS, linestyle=':')
    axes[1, 0].set_title('样本数量 (log10)')
    plt.colorbar(im3, ax=axes[1, 0], orientation='horizontal', pad=0.05)

    # Panel 4: binary data-coverage map.
    coverage = ~np.isnan(ds.cloud_base_height)
    im4 = axes[1, 1].pcolormesh(ds.lon, ds.lat, coverage,
                                cmap='RdYlBu', vmin=0, vmax=1)
    axes[1, 1].coastlines()
    axes[1, 1].add_feature(cfeature.BORDERS, linestyle=':')
    axes[1, 1].set_title('数据覆盖情况 (1=有数据, 0=无数据)')
    plt.colorbar(im4, ax=axes[1, 1], orientation='horizontal', pad=0.05)

    plt.tight_layout()
    plot_path = os.path.join(output_dir, "cloudsat_annual_mean_quicklook.png")
    plt.savefig(plot_path, dpi=300, bbox_inches='tight')
    # Release figure memory — without this, repeated calls leak figures.
    plt.close(fig)
    logging.info(f"可视化图已保存至: {plot_path}")

    # Log global summary statistics for the quicklook.
    valid_data = ds.cloud_base_height.where(~np.isnan(ds.cloud_base_height))
    logging.info(f"全球平均云底高度: {valid_data.mean().values:.2f} m")
    logging.info(f"全球云底高度标准差: {valid_data.std().values:.2f} m")
    logging.info(f"数据覆盖率: {np.sum(~np.isnan(ds.cloud_base_height)) / ds.cloud_base_height.size * 100:.2f}%")


def main():
    """Run the end-to-end CloudSat annual-mean pipeline."""
    logging.info("=== CloudSat全球年平均云底高度计算开始 ===")

    try:
        # 1) Load the merged CSV and apply quality control.
        observations = load_and_preprocess_data()

        # 2) Build the global 1-degree grid and log its extent.
        grid_info = create_global_grid(bin_size=1.0)
        logging.info(f"网格形状: {grid_info['shape']} (lat × lon)")
        logging.info(f"经度范围: {grid_info['lon_edges'][0]:.1f} 到 {grid_info['lon_edges'][-1]:.1f}")
        logging.info(f"纬度范围: {grid_info['lat_edges'][0]:.1f} 到 {grid_info['lat_edges'][-1]:.1f}")

        # 3) Bin observations to the grid and average over the study period.
        cbh_mean, cbh_std, sample_count = calculate_annual_mean(observations, grid_info, min_samples=5)

        # 4) Persist the gridded product to NetCDF.
        dataset = save_to_netcdf(cbh_mean, cbh_std, sample_count, grid_info)

        # 5) Render the quicklook figure.
        create_quicklook_plot(dataset)

        logging.info("=== 处理完成 ===")

    except Exception as e:
        logging.error(f"处理过程中出错: {str(e)}", exc_info=True)
        raise


if __name__ == "__main__":
    main()