'''
Author: tomwoo tom.woo@outlook.com
Date: 2025-08-04 11:15:57
LastEditors: tomwoo tom.woo@outlook.com
LastEditTime: 2025-08-31 16:45:05
FilePath: /Weather_Forecast_Agent/forecast_tools/forecast.py
Description: Default koroFileHeader template. Set `customMade` and see the koroFileHeader configuration guide for customization: https://github.com/OBKoro1/koro1FileHeader/wiki/%E9%85%8D%E7%BD%AE
'''

import io
import os
from collections import OrderedDict
from datetime import date, time, datetime, timezone, timedelta
from pathlib import Path

import numpy as np
import matplotlib.animation as animation
import torch
import xarray as xr
from PIL import Image
import PIL.GifImagePlugin as GifImagePlugin
from earth2studio import run
from earth2studio.data import DataSource, GFS
from earth2studio.io import IOBackend, KVBackend
from earth2studio.models.px import PrognosticModel, SFNO
from earth2studio.utils.coords import CoordSystem

from forecast_tools.plot import animate_local


# UTC date of the most recently generated animations; None until the first generation.
animations_date = None
# Timeout for asynchronous GFS data fetches.
data_async_timeout = 600 # unit: second
# Default maximum number of retries (stepping back one day per retry) for the inference run.
max_num_trials = 10
# Forecast variables to render: dataset short name, Chinese display name, display unit,
# and a conversion function from the dataset's raw unit to the display unit
# (msl: Pa -> hPa; t2m: K -> °C; tcwv: already kg/m^2).
variables = [{"short_name": "msl", "ch_name": "平均海平面气压", "unit": "hPa", "convert_fn": lambda x: x / 100}, 
             {"short_name": "t2m", "ch_name": "2米处气温", "unit": "°C", "convert_fn": lambda x: x - 273.15}, 
             {"short_name": "tcwv", "ch_name": "可降水量", "unit": "kg/m^2", "convert_fn": lambda x: x}]


def get_current_datetime(timezone_delta: float = 0.0) -> tuple[datetime, datetime]:
    """Return the current UTC time and the local time shifted by ``timezone_delta`` hours."""
    now_utc = datetime.now(timezone.utc)
    now_local = now_utc + timedelta(hours=timezone_delta)
    return now_utc, now_local

def numpy_datetime64_to_datetime_date(src: np.datetime64) -> date:
    """Convert a numpy datetime64 value to a ``datetime.date`` (truncated to day precision)."""
    iso_day = np.datetime_as_string(src, unit='D')
    return date.fromisoformat(iso_day)

def try_to_run_deterministic(
        date_begin: date, 
        nsteps: int, 
        prognostic: PrognosticModel, 
        data: DataSource, 
        io: IOBackend, 
        output_coords: CoordSystem = OrderedDict({}), 
        device: torch.device | None = None, 
        date_end: date | None = None, 
        max_num_trials: int = 10
) -> IOBackend | None:
    """Built in deterministic workflow with different dates.
    This workflow creates a deterministic inference pipeline to produce a forecast
    prediction using a prognostic model, retrying with one-day-earlier start dates
    on failure.

    Parameters
    ----------
    date_begin : date
        The first date, with which the workflow creates
    nsteps : int
        Number of forecast steps
    prognostic : PrognosticModel
        Prognostic model
    data : DataSource
        Data source
    io : IOBackend
        IO object
    output_coords: CoordSystem, optional
        IO output coordinate system override, by default OrderedDict({})
    device : torch.device, optional
        Device to run inference on, by default None
    date_end : date, optional
        The last date, with which the workflow creates (exclusive)
    max_num_trials : int, optional
        The maximum number of trials

    Returns
    -------
    IOBackend
        Output IO object

    Raises
    ------
    Exception
        When all ``max_num_trials`` attempts fail, or when the retry date would
        reach ``date_end`` (exclusive lower bound) before a run succeeds.
    """
    datetime_end = datetime.combine(date_end, time()) if date_end is not None else None
    datetime_ = datetime.combine(date_begin, time())
    num_trials = 0

    while num_trials < max_num_trials:
        # Stop stepping back once the trial date reaches the exclusive end date.
        if datetime_end is not None and datetime_ <= datetime_end:
            raise Exception(f"The trial date ({datetime_.date()}) is NOT later than the end date ({date_end})!")
        try:
            return run.deterministic([datetime_], nsteps, prognostic, data, io, output_coords, device)
        except Exception as e:
            # Log the failure (consistent with the rest of this module) and
            # retry with the previous day's initialization date.
            print(e)
            num_trials += 1
            datetime_ -= timedelta(days=1)

    raise Exception(f"The number of trials ({num_trials}) reaches the maximum value ({max_num_trials})!")

def try_to_generate_forecast(
        dataset_filename: str, 
        num_days: int, 
        lat_lon: tuple[float, float], 
        timezone_delta: float, 
        images_path: str, 
        image_ext: str
) -> list[str | None]:
    """Generate (or refresh) per-variable forecast animation image files.

    Runs the SFNO deterministic forecast when the cached dataset file is missing
    or out of date, then renders one animation image per entry in ``variables``.
    Updates the module-level ``animations_date`` on success.

    Parameters
    ----------
    dataset_filename : str
        Path of the NetCDF dataset cache file.
    num_days : int
        Number of forecast days (4 model steps per day).
    lat_lon : tuple[float, float]
        Latitude and longitude of the location to highlight.
    timezone_delta : float
        Offset from UTC in hours, used for local-time labels.
    images_path : str
        Directory in which the animation images are written.
    image_ext : str
        File extension (including the dot) of the generated images.

    Returns
    -------
    list[str | None]
        Per-variable image filenames, or a list of ``None`` when the animations
        are already up to date or the forecast could not be produced.
    """
    global animations_date

    # 1. Get the current UTC date. -> 2
    utc_datetime = get_current_datetime()[0]
    utc_date = utc_datetime.date()
    print(f"The current UTC date is {utc_date}.")

    # 2. Check whether the date of animations is the current UTC date: yes -> end, no -> 3
    end_flag = (animations_date == utc_date) if animations_date else False

    if end_flag:
        print(f"The animations ({animations_date}) are up to date ({utc_date}).")
        return [None] * len(variables)

    # 3. Check whether dataset file exists: yes -> 6, no -> 4
    if not Path(dataset_filename).exists():
        print("The dataset file does not exist.")
        # 4. Try to create a determinstic inference pipeline with different dates: success -> 5, failure -> end
        try:
            model = SFNO.load_model(SFNO.load_default_package())
            data = GFS(async_timeout=data_async_timeout)
            # Named io_backend (not io) to avoid shadowing the stdlib io module.
            io_backend = KVBackend()
            io_backend = try_to_run_deterministic(utc_date, 4 * num_days, model, data, io_backend, max_num_trials=max_num_trials)
        except Exception as e:
            print(e)
            return [None] * len(variables)

        # 5. Write dataset file. -> 10
        ds = io_backend.to_xarray()
        ds = ds[[var["short_name"] for var in variables]]
        ds.to_netcdf(dataset_filename)
        dataset_date = numpy_datetime64_to_datetime_date(ds.time[0])
        print(f"The dataset file ({dataset_date}) saved.")
    else:
        print("The dataset file exists.")
        # 6. Read dataset file. -> 7
        ds = xr.open_dataset(dataset_filename)
        dataset_date = numpy_datetime64_to_datetime_date(ds.time[0])
        print(f"The dataset file ({dataset_date}) read.")

        # 7. Check whether ds.time[0] is equal to the current UTC date: yes -> 10, no -> 8
        if dataset_date != utc_date:
            print(f"The dataset file ({dataset_date}) is out of date ({utc_date}).")
            # 8. Try to create a determinstic inference pipeline with different dates: success -> 9, failure -> 10
            write_flag = False
            try:
                model = SFNO.load_model(SFNO.load_default_package())
                data = GFS(async_timeout=data_async_timeout)
                io_backend = KVBackend()
                io_backend = try_to_run_deterministic(utc_date, 4 * num_days, model, data, io_backend, date_end=dataset_date, max_num_trials=max_num_trials)
                write_flag = True
            except Exception as e:
                print(e)

            if write_flag:
                # 9. Write dataset file. -> 10
                # Release the stale file handle before overwriting the same NetCDF file.
                ds.close()
                ds = io_backend.to_xarray()
                ds = ds[[var["short_name"] for var in variables]]
                ds.to_netcdf(dataset_filename)
                dataset_date = numpy_datetime64_to_datetime_date(ds.time[0])
                print(f"The dataset file ({dataset_date}) saved.")

    # 10. check whether the date of dataset is NOT later than that of animations, if exists: y -> end, n -> 11
    if animations_date and dataset_date <= animations_date:
        print(f"The date of dataset ({dataset_date}) is NOT later than that of animations ({animations_date}).")
        return [None] * len(variables)

    # 11. Generate animations. Set the date of animations. -> end
    image_filenames = []
    for var in variables:
        image_filename = os.path.join(images_path, var["short_name"]) + image_ext
        # Index the dataset directly instead of eval() on an f-string
        # (safer, and avoids 3.12-only nested same-quote f-strings).
        ani = animate_local(var["convert_fn"](ds[var["short_name"]]), 
                            ds.time[0], np.timedelta64(int(60 * timezone_delta), 'm'), 
                            loc_lat=lat_lon[0], loc_lon=lat_lon[1] if lat_lon[1] >= 0 else lat_lon[1] + 360, 
                            cb_label=f"{var['ch_name']} [{var['unit']}]")
        ani.save(image_filename, writer="pillow")
        print()
        image_filenames.append(image_filename)

    animations_date = dataset_date
    print(f"The animations [{animations_date}, ({lat_lon[0]}°, {lat_lon[1]}°)] are generated.")

    return image_filenames

def matplotlib_animation_FuncAnimation_to_PIL_GifImagePlugin_GifImageFile(animation: animation.FuncAnimation) -> GifImagePlugin.GifImageFile | None:
    """Render a matplotlib FuncAnimation into an in-memory PIL GIF image.

    Each frame is drawn to an in-memory PNG buffer, collected as a PIL image,
    and the sequence is assembled into a single looping GIF held in memory.
    Returns None when the animation produces no frames.

    NOTE(review): relies on private FuncAnimation internals
    (``_draw_frame``, ``_fig``, ``_interval``) — may break across
    matplotlib versions; confirm on upgrade.
    """
    rendered_frames = []

    # Draw each frame and capture the canvas into an in-memory buffer.
    for frame_data in animation.new_frame_seq():
        animation._draw_frame(frame_data)
        frame_buf = io.BytesIO()
        animation._fig.canvas.print_figure(frame_buf)
        frame_buf.seek(0)
        rendered_frames.append(Image.open(frame_buf))

    if not rendered_frames:
        return None

    first_frame, *remaining_frames = rendered_frames
    gif_buf = io.BytesIO()
    first_frame.save(
        gif_buf,
        format='GIF',
        save_all=True,
        append_images=remaining_frames,
        duration=animation._interval,
        loop=0
    )
    gif_buf.seek(0)
    return Image.open(gif_buf)

def generate_animations(
        dataset_filename: str, 
        num_days: int, 
        lat_lon: tuple[float, float], 
        timezone_delta: float, 
        interval: int
) -> list[GifImagePlugin.GifImageFile | None]:
    """Render in-memory GIF animations for each configured variable from the cached dataset.

    Parameters
    ----------
    dataset_filename : str
        Path of the NetCDF dataset cache file.
    num_days : int
        Number of forecast days (4 model steps per day; frame count is 4*num_days + 1).
    lat_lon : tuple[float, float]
        Latitude and longitude of the location to highlight.
    timezone_delta : float
        Offset from UTC in hours, used for local-time labels.
    interval : int
        Frame interval in milliseconds passed to the animation.

    Returns
    -------
    list[GifImagePlugin.GifImageFile | None]
        One in-memory GIF per entry in ``variables`` (None for empty animations).

    Raises
    ------
    FileNotFoundError
        When the dataset file does not exist.
    """
    # 1. Check whether dataset file exists: yes -> 2, no -> end
    if not Path(dataset_filename).exists():
        print("The dataset file does NOT exist!")
        # Include the filename so the error is actionable.
        raise FileNotFoundError(dataset_filename)

    print("The dataset file exists.")
    # 2. Read dataset file. -> 3
    ds = xr.open_dataset(dataset_filename)
    dataset_date = numpy_datetime64_to_datetime_date(ds.time[0])
    print(f"The dataset file ({dataset_date}) read.")

    # 3. Generate animations. -> end
    images = []
    for var in variables:
        # Index the dataset directly instead of eval() on an f-string
        # (safer, and avoids 3.12-only nested same-quote f-strings).
        ani = animate_local(var["convert_fn"](ds[var["short_name"]]), 
                            ds.time[0], np.timedelta64(int(60 * timezone_delta), 'm'), 
                            loc_lat=lat_lon[0], loc_lon=lat_lon[1] if lat_lon[1] >= 0 else lat_lon[1] + 360, 
                            max_frames=4 * num_days + 1, 
                            cb_label=f"{var['ch_name']} [{var['unit']}]", 
                            interval=interval)
        image = matplotlib_animation_FuncAnimation_to_PIL_GifImagePlugin_GifImageFile(ani)
        print()
        images.append(image)

    print(f"The animations [{dataset_date}, ({lat_lon[0]}°, {lat_lon[1]}°)] are generated.")

    return images

# end of file
