import json
import os
import pandas as pd
from config import Config
from utils.dms_convert import dms_to_deg
# from utils.interpolation import idw_batch, kriging_batch
from utils.interpolation import kriging_batch
from utils.detection import detect_anomalies
from utils.plotting import plot_residual_map
from utils.cleaning import clean_precipitation_data


# Make sure the output directory exists before anything is written to it.
os.makedirs("result", exist_ok=True)

# --- Step 1: load a single observation file (UTF-8 JSON) ---
with open("data/data2024071609.json", "r", encoding="utf-8") as fh:
    data = json.load(fh)

# --- Step 1.1: data cleaning (drop invalid precipitation values) ---
print("进行数据清理（删除降水无效值）...")
data = clean_precipitation_data(data)


# --- Step 2: build the station table ---
# Each raw item carries DMS-encoded "Lon"/"Lat" and an hourly precipitation
# value "PRE_1h"; coordinates are converted to decimal degrees. Records that
# fail conversion (missing keys, unparsable numbers) are reported and skipped
# so a single bad record does not abort the whole run.
records = []
for item in data:
    try:
        lon = dms_to_deg(item["Lon"])
        lat = dms_to_deg(item["Lat"])
        pre = float(item["PRE_1h"])
        records.append({
            "Station_Id_C": item["Station_Id_C"],
            "Station_Name": item["Station_Name"],
            "lon": lon,
            "lat": lat,
            "pre": pre
        })
    except Exception as e:
        # Best-effort: log the offending record and continue.
        print(f"处理记录时出错: {e}")
        continue

df = pd.DataFrame(records)

# --- Step 3: spatial interpolation ---
# IDW is currently disabled; kriging_batch returns the interpolated value
# and its standard deviation per station.
print("正在 进行 IDW 和 Kriging 插值...")
# df["IDW"] = idw_batch(df)
df["Kriging"], df["Kriging_std"] = kriging_batch(df)


# -------------------
# --- Step 4: anomaly detection (thresholds come from Config) ---
print("进行异常检测...")
df = detect_anomalies(df, Config)

# -------------------
# --- Step 5: load the reference station list ---
# "站号" (station id) is the id column in stations.xlsx; it is used below
# to keep only the stations we actually report on.
station_df = pd.read_excel("data/stations.xlsx")
valid_station_ids = station_df["站号"].tolist()

# -------------------
# --- Step 6: save results ---
# Columns written to the Excel report. "Region", the residual/error columns,
# "Z_value" and "异常等级" are not set when records are built above, so they
# are presumably added by detect_anomalies — TODO confirm against utils.detection.
output_fields = [
    "Station_Id_C", "Station_Name", "Region", "lon", "lat", "pre",
    # "IDW",
    "Kriging", "Kriging_std",
    # "IDW_residual",
    "Kriging_residual",
    # "IDW_relative_error",
    "Kriging_relative_error",
    "Z_value", "异常等级"
]

# Keep only stations whose id appears in the reference list.
filtered_df = df[df["Station_Id_C"].isin(valid_station_ids)]

# Output file names (plain literals — no placeholders, so no f-strings).
excel_file_name = os.path.join("result", "_异常识别结果.xlsx")
png_file_name = os.path.join("result", "residual_map.png")

# Write the filtered report to Excel without the index column.
filtered_df[output_fields].to_excel(excel_file_name, index=False)

# -------------------
# --- Step 7: plot residual distribution with anomaly levels ---
print("绘制 残差分布图和异常等级...")
plot_residual_map(filtered_df, save_path=png_file_name)
print("完成")

# 注释掉批量处理代码
# 获取 data 目录下所有的 JSON 文件
# data_dir = "data"
# json_files = [f for f in os.listdir(data_dir) if f.endswith('.json')]

# for json_file in json_files:
#     file_path = os.path.join(data_dir, json_file)
#     # 提取时间后缀
#     time_suffix = os.path.splitext(json_file)[0].replace("data", "")

#     # -------------------
#     # 读取数据
#     with open(file_path, "r", encoding="utf-8") as f:
#         data = json.load(f)

#     records = []
#     for item in data:
#         try:
#             lon = dms_to_deg(item["Lon"])
#             lat = dms_to_deg(item["Lat"])
#             pre = float(item["PRE_1h"])
#             records.append({
#                 "Station_Id_C": item["Station_Id_C"],
#                 "Station_Name": item["Station_Name"],
#                 "lon": lon,
#                 "lat": lat,
#                 "pre": pre
#             })
#         except:
#             continue

#     df = pd.DataFrame(records)

#     # -------------------
#     # 插值操作
#     print(f"正在对 {json_file} 进行 IDW 和 Kriging 插值...")
#     df["IDW"] = idw_batch(df)
#     df["Kriging"], df["Kriging_std"] = kriging_batch(df)

#     # -------------------
#     # 异常检测
#     print(f"对 {json_file} 进行异常检测...")
#     df = detect_anomalies(df, Config)

#     # -------------------
#     # 读取 station.xlsx 文件
#     station_df = pd.read_excel("data/stations.xlsx")
#     # 获取站号列表
#     valid_station_ids = station_df["站号"].tolist()

#     # -------------------
#     # 结果保存
#     output_fields = [
#         "Station_Id_C", "Station_Name", "Region", "lon", "lat", "pre",
#         "IDW", "Kriging", "Kriging_std",
#         "IDW_residual", "Kriging_residual",
#         "IDW_relative_error", "Kriging_relative_error",
#         "Z_value", "异常等级"
#     ]

#     # 过滤站点，只保留站号在 valid_station_ids 列表中的记录
#     filtered_df = df[df["Station_Id_C"].isin(valid_station_ids)]

#     # 生成结果文件名
#     excel_file_name = os.path.join("result", f"{time_suffix}_异常识别结果.xlsx")
#     png_file_name = os.path.join("result", f"{time_suffix}residual_map.png")

#     # 输出过滤后的结果到 Excel 文件
#     filtered_df[output_fields].to_excel(excel_file_name, index=False)

#     # -------------------
#     # 绘图
#     print(f"绘制 {json_file} 的残差分布图和异常等级...")
#     plot_residual_map(filtered_df, save_path=png_file_name)

# print("完成")
