""" Full assembly of the parts to form the complete network """

import sys
import os

cur_path = os.path.split(__file__)[0]
sys.path.append(cur_path)
sys.path.append(os.path.join(cur_path, ".."))
from UNet.unet_parts import *
from Model import Physics_Attention_1D, MLP
import torch
import numpy as np
import torch.nn as nn
from timm.models.layers import trunc_normal_
from einops import rearrange, repeat
from torch_scatter import scatter_softmax, scatter
from NN.UNet.Unet_GINO import UNet3DWithSamplePoints
from NN.GNN.FiniteVolumeGN.EPDbackbone import EncoderProcesserDecoder
from utils.normalization import Normalizer
from NN.Transolver.Model import MLP
from torch_geometric import nn as nng


class SageNet(nn.Module):
    """Residual stack of GraphSAGE convolutions.

    Applies ``n_layers`` rounds of (BatchNorm1d -> SAGEConv) on node
    features, then adds the original input back as a single residual
    connection at the end of the stack.
    """

    def __init__(self, n_hidden, n_layers):
        super(SageNet, self).__init__()

        # One (batch-norm, SAGE convolution) pair per layer; the norms
        # run without tracked running statistics.
        self.hidden_layers = nn.ModuleList(
            nng.SAGEConv(in_channels=n_hidden, out_channels=n_hidden)
            for _ in range(n_layers)
        )
        self.bns = nn.ModuleList(
            nn.BatchNorm1d(n_hidden, track_running_stats=False)
            for _ in range(n_layers)
        )

    def forward(self, x, edge_index):
        """Run the norm->conv stack and add the residual shortcut."""
        shortcut = x
        for norm, conv in zip(self.bns, self.hidden_layers):
            x = conv(norm(x), edge_index)
        return x + shortcut


class Model(nn.Module):
    """Hybrid graph/UNet surrogate model.

    Encodes per-cell features with an MLP, refines them with a residual
    GraphSAGE stack over a kNN graph built from cell positions, samples a
    3D UNet at query points, and decodes the concatenated UNet and graph
    features down to ``out_dim``.

    Note: several constructor arguments (``n_layers``, ``dropout``,
    ``n_head``, ``act``, ``mlp_ratio``, ``fun_dim``, ``slice_num``,
    ``ref``, ``unified_pos``) are accepted for interface compatibility but
    are unused by this implementation.
    """

    def __init__(
        self,
        space_dim=1,
        n_layers=5,
        n_hidden=256,
        dropout=0,
        n_head=8,
        act="gelu",
        mlp_ratio=1,
        fun_dim=1,
        out_dim=1,
        slice_num=32,
        ref=8,
        unified_pos=False,
        params=None,
    ):
        super(Model, self).__init__()

        self.n_hidden = n_hidden
        self.n_fvgn_hidden = self.n_hidden

        self.space_dim = space_dim

        # Bugfix: the original assigned self.node_enc twice with identical
        # MLPs; the first instance was discarded (wasted parameters and RNG
        # draws). Build it exactly once. The first 3 input columns are
        # dropped in forward(), hence the ``space_dim - 3`` input width.
        self.node_enc = MLP(space_dim - 3, n_hidden=n_hidden, n_output=n_hidden, res=False)
        self.resSage = SageNet(n_hidden=n_hidden, n_layers=4)

        # Decoder consumes the concatenation of UNet and graph features.
        self.node_dec = MLP(n_hidden + n_hidden, n_hidden * 4, out_dim, res=False)

        self.unet_o_dim = n_hidden
        self.unet = UNet3DWithSamplePoints(1, self.unet_o_dim, self.unet_o_dim, 4)

        self.initialize_weights()

    def initialize_weights(self):
        """Recursively initialize all submodule weights."""
        self.apply(self._init_weights)

    def _init_weights(self, m):
        # Truncated-normal weights for linear layers, unit-scale norms.
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=0.02)
            # Removed redundant isinstance(m, nn.Linear) re-check: this
            # branch is already guarded by it.
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.BatchNorm1d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    def forward(self, x, graph_node, graph_edge=None, graph_cell=None, params=None):
        """Predict per-cell outputs.

        Args:
            x: cell feature tensor; the first 3 columns are skipped
               (presumably spatial coordinates — TODO confirm with caller).
            graph_node: batch object providing ``num_graphs`` and ``voxel``.
            graph_edge: unused, kept for interface compatibility.
            graph_cell: batch object providing ``pos``, ``batch``, ``query``.
            params: config object; only ``num_samples`` is read.

        Returns:
            Decoded feature tensor of width ``out_dim``.
        """
        num_samples = params.num_samples

        batch_size = graph_node.num_graphs
        # Build a 4-NN graph over cell positions, restricted to each
        # batch element via batch_x/batch_y.
        edge_index = nng.knn(
            graph_cell.pos,
            graph_cell.pos,
            k=4,
            batch_x=graph_cell.batch,
            batch_y=graph_cell.batch,
        )
        # knn returns each point as its own neighbor; drop those self-loops.
        selfloop_mask = edge_index[0] != edge_index[1]
        edge_index = edge_index[:, selfloop_mask]

        x = x[:, 3:]
        z = self.node_enc(x)
        z = self.resSage(z, edge_index)

        half = False  # run the UNet in full precision
        ufeatures = self.unet.forward(graph_node.voxel, graph_cell.query, half)
        if batch_size != 1:
            ufeatures = ufeatures.reshape(batch_size * num_samples, -1)
        else:
            ufeatures = ufeatures.squeeze()

        z = self.node_dec(torch.cat([ufeatures, z], dim=-1))
        return z
