import torch
import numpy as np
import torch.nn as nn
import torch_geometric.nn as nng
from timm.models.layers import trunc_normal_
from einops import rearrange, repeat
from torch_geometric.data import Data
from utils.normalization import Normalizer
import os
import sys

cur_path = os.path.split(__file__)[0]
sys.path.append(cur_path)


class FVGN(nn.Module):
    """Thin wrapper that builds the Attu-FVGN backbone and adds checkpoint I/O.

    The actual network lives in ``FVGNAttUnet.Model`` (imported lazily in
    ``__init__``); this class only forwards calls to it and provides
    save/load helpers for model + optimizer + scheduler state.
    """

    def __init__(self, params) -> None:
        """Build the backbone selected by ``params.net``.

        Parameters
        ----------
        params : object
            Config namespace. Must provide ``net``, ``node_input_size``,
            ``hidden_size`` and ``node_output_size``; it is also passed
            through to the backbone unchanged.

        Raises
        ------
        ValueError
            If ``params.net`` is not ``"Attu-FVGN"`` (the only backbone
            this wrapper knows how to construct).
        """
        super().__init__()

        self.params = params
        if self.params.net == "Attu-FVGN":
            # Lazy import so the heavy backbone module is only loaded when
            # actually requested.
            from FVGNAttUnet import Model
        else:
            # The original code fell through to a confusing NameError on
            # ``Model``; fail fast with an explicit message instead.
            raise ValueError(
                f"Unsupported params.net {self.params.net!r}; expected 'Attu-FVGN'"
            )

        # Backbone hyper-parameters (layer count, heads, slice_num, ...) are
        # fixed here; only the sizes come from ``params``.
        self.nn = Model(
            space_dim=params.node_input_size,
            n_hidden=params.hidden_size,
            n_layers=3,
            fun_dim=0,
            n_head=4,
            mlp_ratio=2,
            out_dim=params.node_output_size,
            slice_num=32,
            unified_pos=0,
            params=params,
        )

    def forward(
        self,
        graph_node: Data = None,
        target="vel_press",
        params=None,
    ):
        """Run the backbone on ``graph_node``.

        Returns the backbone's ``(pred_vel, pred_press, pred_)`` triple
        unchanged. Output shapes/semantics are defined by
        ``FVGNAttUnet.Model`` — not visible from here.
        """
        pred_vel, pred_press, pred_ = self.nn(
            graph_node=graph_node,
            params=params,
            target=target,
        )
        return pred_vel, pred_press, pred_

    def load_checkpoint(
        self, optimizer=None, scheduler=None, ckpdir=None, device=None, is_training=True
    ):
        """Restore model (and optionally optimizer/scheduler) state.

        Parameters
        ----------
        optimizer : optimizer or list of optimizers, optional
            Restored from ``optimizer{i}`` entries when present.
        scheduler : scheduler or list of schedulers, optional
            Restored from ``scheduler{i}`` entries when present.
        ckpdir : str, optional
            Checkpoint path. Required in practice: ``self.model_dir`` is
            never set by this class, so omitting ``ckpdir`` raises.
        device : torch.device or str, optional
            Forwarded to ``torch.load(map_location=...)``.
        is_training : bool
            Kept for backward compatibility; it only drove dead
            bookkeeping in the original implementation and has no effect.
        """
        if ckpdir is None:
            # ``model_dir`` is never assigned in this class; raise a clear
            # error instead of an AttributeError.
            ckpdir = getattr(self, "model_dir", None)
            if ckpdir is None:
                raise ValueError(
                    "load_checkpoint requires ckpdir (self.model_dir is not set)"
                )
        dicts = torch.load(ckpdir, map_location=device)
        self.load_state_dict(dicts["model"])

        if optimizer is not None and "optimizer0" in dicts:
            optimizers = optimizer if isinstance(optimizer, list) else [optimizer]
            for i, o in enumerate(optimizers):
                if f"optimizer{i}" in dicts:
                    o.load_state_dict(dicts[f"optimizer{i}"])

        if scheduler is not None and "scheduler0" in dicts:
            schedulers = scheduler if isinstance(scheduler, list) else [scheduler]
            for i, s in enumerate(schedulers):
                if f"scheduler{i}" in dicts:
                    s.load_state_dict(dicts[f"scheduler{i}"])

        print(f"Simulator model and optimizer/scheduler loaded checkpoint {ckpdir}")

    def save_checkpoint(self, path=None, optimizer=None, scheduler=None):
        """Save model state (plus optional optimizer/scheduler state) to ``path``.

        Each optimizer/scheduler is stored under ``optimizer{i}`` /
        ``scheduler{i}``, matching what ``load_checkpoint`` expects.
        """
        if path is None:
            # Same guard as load_checkpoint: ``model_dir`` is never set here.
            path = getattr(self, "model_dir", None)
            if path is None:
                raise ValueError(
                    "save_checkpoint requires path (self.model_dir is not set)"
                )

        to_save = {"model": self.state_dict()}

        if optimizer is not None:
            optimizers = optimizer if isinstance(optimizer, list) else [optimizer]
            for i, o in enumerate(optimizers):
                to_save[f"optimizer{i}"] = o.state_dict()

        if scheduler is not None:
            schedulers = scheduler if isinstance(scheduler, list) else [scheduler]
            for i, s in enumerate(schedulers):
                to_save[f"scheduler{i}"] = s.state_dict()

        torch.save(to_save, path)

        print(f"Simulator model saved at {path}")
