"""
Author: 'silencesoup' 'silencesoup@outlook.com'
Date: 2024-12-01 14:55:41
LastEditors: 'silencesoup' 'silencesoup@outlook.com'
LastEditTime: 2024-12-03 11:24:01
FilePath: \neimeng_2024\main.py
Description: 这是默认设置,请设置`customMade`, 打开koroFileHeader查看配置 进行设置: https://github.com/OBKoro1/koro1FileHeader/wiki/%E9%85%8D%E7%BD%AE
"""

import datetime

from loguru import logger
from hydroevaluate.configs.config import cmd, default_config_file, update_cfg
import xarray as xr
import pandas as pd
from utils import (
    check_time_interval,
    get_basin_area,
    move_time_start_back_30_days,
    move_time_start_back_365_days_day_format,
    move_time_start_forward_6_days_and_21_hours,
    move_time_start_forward_6_days,
    trans_to_hourly,
)
from hydroevaluate.hydroevaluate import EvalDeepHydro
from data_source import PastDataSource, agg_rain_data


def _run_inference(
    model_name, gage_id_lst, tp_data, t_range, horizon, rho, tp_range_end, **extra_cmd_args
):
    """Build the torchhydro config, run the Seq2Seq model and return the prediction.

    Args:
        model_name (str): model directory name under ``data/`` holding the
            checkpoint (``best_model.pth``) and scaler stats.
        gage_id_lst (list[str]): basin ids to infer for.
        tp_data (list[dict]): precipitation records fed to PastDataSource.
        t_range (tuple[str, str]): (start, end) test time range.
        horizon (int): forecast length in model time steps (also used as
            ``forecast_length`` in the hyperparameters — they are identical
            for both the 3h and 1D configurations).
        rho (int): length of the warm-up/history window in time steps.
        tp_range_end (int): exclusive end of the "tp" feature time range.
        **extra_cmd_args: extra keyword args forwarded to ``cmd`` (e.g.
            ``min_time_interval``/``min_time_unit`` for the daily model).

    Returns:
        xarray.Dataset: prediction with the first (warm-up) time step dropped.
    """
    data_source = PastDataSource(tp_data=tp_data)
    cfg_file = default_config_file()
    args = cmd(
        object_ids=gage_id_lst,
        t_range_test=[t_range],
        download=False,
        pth_path=f"data/{model_name}/best_model.pth",
        stat_file_path=f"data/{model_name}/dapengscaler_stat.json",
        model_type="torchhydro",
        device=[-1],  # -1: run on CPU
        model_name="Seq2Seq",
        horizon=horizon,
        rho=rho,
        var_lst=["tp"],
        target_cols=["streamflow"],
        feature_mapping={
            "tp": {
                "category": "precipitation",
                "time_ranges": [(0, tp_range_end)],
                "offset": 1,
            },
        },
        model_hyperparam={
            "en_input_size": 16,
            "de_input_size": 17,
            "output_size": 1,
            "hidden_size": 256,
            "forecast_length": horizon,
            "prec_window": 1,
            "teacher_forcing_ratio": 0.5,
        },
        **extra_cmd_args,
    )
    update_cfg(cfg_file, args)
    eval_deep_hydro = EvalDeepHydro(cfg_file, data_source)
    pred = eval_deep_hydro.model_infer()
    # Drop the first time step (prec_window warm-up output).
    return pred.isel(time=slice(1, None))


def _infer_3h(model_name, time_start, gage_id_lst, tp_data):
    """Run the 3-hourly model and merge hourly-resampled predictions with rain."""
    new_time_start = move_time_start_back_30_days(time_start)
    time_end = move_time_start_forward_6_days_and_21_hours(time_start)
    pred = _run_inference(
        model_name,
        gage_id_lst,
        tp_data,
        (new_time_start, time_end),
        horizon=56,
        rho=240,
        tp_range_end=296,
    )
    # Convert the Dataset to a flat DataFrame and resample to hourly steps.
    df_pred = pred.to_dataframe().reset_index()
    df_pred = trans_to_hourly(df_pred)
    df_rain = pd.DataFrame(tp_data)[["time", "tp"]]
    df_rain["time"] = pd.to_datetime(df_rain["time"])
    df_pred = pd.merge(df_pred, df_rain, how="left", on="time")
    # Hours without an observed rain record count as zero precipitation.
    df_pred.fillna(0, inplace=True)
    return df_pred


def _infer_1d(model_name, time_start, gage_id_lst, tp_data):
    """Run the daily model and merge daily-aggregated rain into the prediction."""
    new_time_start = move_time_start_back_365_days_day_format(time_start)
    time_end = move_time_start_forward_6_days(time_start)
    pred = _run_inference(
        model_name,
        gage_id_lst,
        tp_data,
        (new_time_start, time_end),
        horizon=7,
        rho=365,
        tp_range_end=372,
        min_time_interval=1,
        min_time_unit="D",
    )
    # Convert the Dataset to a flat DataFrame.
    df_pred = pred.to_dataframe().reset_index()
    df_rain = agg_rain_data(tp_data, [new_time_start, time_end], "1D")
    df_pred = pd.merge(df_pred, df_rain, how="left", on="time")
    # Days without an observed rain record count as zero precipitation.
    df_pred.fillna(0, inplace=True)
    # Spread the daily total evenly over 24 hours before unit conversion.
    df_pred["streamflow"] = df_pred["streamflow"] / 24
    return df_pred


def model_infer(model_name, time_start, gage_id_lst, tp_data):
    """Run streamflow inference for the given basins and precipitation data.

    Args:
        model_name (str): "camels_3h" or "camels_1D" or "camelsandneimeng_3h"
            or "camelsandneimeng_1D" or "neimeng_3h" or "neimeng_1D"; only the
            "3h"/"1D" substring selects the configuration.
        time_start (str): forecast start time, e.g. "2024-08-01-01".
        gage_id_lst (list[str]): basin ids, e.g. ["NM000001", "NM000002"];
            the first basin's area is used for the unit conversion.
        tp_data (list[dict]): precipitation records (keys "basin", "time", "tp").

    Returns:
        pandas.DataFrame: columns ["basin", "time", "streamflow", "tp"], with
            "time" rendered as "%Y-%m-%d %H:%M:%S" strings (when non-empty).

    Raises:
        ValueError: if model_name contains neither "3h" nor "1D".
    """
    logger.warning(time_start)
    logger.warning(type(time_start))
    logger.warning(gage_id_lst)
    logger.warning(type(gage_id_lst))
    logger.warning(tp_data)
    logger.warning(type(tp_data))
    if "3h" in model_name:
        df_pred = _infer_3h(model_name, time_start, gage_id_lst, tp_data)
    elif "1D" in model_name:
        df_pred = _infer_1d(model_name, time_start, gage_id_lst, tp_data)
    else:
        # Previously this fell through and crashed with a NameError on df_pred;
        # fail fast with a clear message instead.
        raise ValueError(
            f"model_name must contain '3h' or '1D', got {model_name!r}"
        )
    # Convert specific discharge to flow using the first basin's area
    # (factor 3.6 — presumably mm/h * km^2 -> m^3/s; confirm units upstream).
    area = get_basin_area(gage_id_lst[0])
    df_pred["streamflow"] = df_pred["streamflow"] * area / 3.6
    if not df_pred.empty:
        df_pred = df_pred[["basin", "time", "streamflow", "tp"]]
        # Render timestamps as strings for the JSON-friendly output.
        df_pred["time"] = df_pred["time"].apply(
            lambda x: x.strftime("%Y-%m-%d %H:%M:%S") if not pd.isnull(x) else x
        )
    return df_pred


def test_3h():
    """Smoke-test the 3-hourly model with a small hard-coded rainfall payload."""
    payload = {
        "model_name": "neimenggu_3h",
        "time_start": "2024-07-22-01",
        "gage_id_lst": ["neimenggu_11200260"],
        "data": [
            {"basin": "neimenggu_11200260", "time": "2024-07-22T01:00:00", "tp": 10.5},
            {"basin": "neimenggu_11200260", "time": "2024-07-22T02:00:00", "tp": 10.5},
            {"basin": "neimenggu_11200260", "time": "2024-07-22T04:00:00", "tp": 15.0},
        ],
    }
    pred = model_infer(
        model_name=payload["model_name"],
        time_start=payload["time_start"],
        gage_id_lst=payload["gage_id_lst"],
        tp_data=payload["data"],
    )
    logger.warning(pred)

def test_1D():
    """Smoke-test the daily model with a small hard-coded rainfall payload."""
    payload = {
        "model_name": "neimenggu_1D",
        "time_start": "2024-07-22-00",
        "gage_id_lst": ["neimenggu_11200260"],
        "data": [
            {"basin": "neimenggu_11200260", "time": "2024-07-22T01:00:00", "tp": 10.5},
            {"basin": "neimenggu_11200260", "time": "2024-07-22T02:00:00", "tp": 10.5},
            {"basin": "neimenggu_11200260", "time": "2024-07-22T04:00:00", "tp": 15.0},
        ],
    }
    pred = model_infer(
        model_name=payload["model_name"],
        time_start=payload["time_start"],
        gage_id_lst=payload["gage_id_lst"],
        tp_data=payload["data"],
    )
    logger.warning(pred)

if __name__ == "__main__":
    # Manual smoke-test entry point; swap the comment below to run the daily model.
    test_3h()
    # test_1D()
