""" Full assembly of the parts to form the complete network """

import sys
import os

# Path hack: make sibling packages (UNet, NN, ...) importable even when this
# file is executed directly rather than as part of the package.
cur_path = os.path.split(__file__)[0]
sys.path.append(cur_path)
sys.path.append(os.path.join(cur_path, ".."))

from UNet.unet_parts import *  # NOTE(review): wildcard import; depends on the path hack above
import torch
import numpy as np
import torch.nn as nn
from timm.models.layers import trunc_normal_
from NN.UNet.attention_unet import UNet3DWithSamplePoints
from NN.GNN.FiniteVolumeGN.EPDbackbone import EncoderProcesserDecoder


class Model(nn.Module):
    """Hybrid network: a 3D U-Net voxel encoder plus a finite-volume graph
    network (FVGN) backbone; their per-point features are concatenated and
    projected to ``out_dim`` by a small MLP head.

    NOTE(review): several constructor arguments (``space_dim``, ``n_layers``,
    ``dropout``, ``n_head``, ``act``, ``mlp_ratio``, ``fun_dim``,
    ``slice_num``, ``ref``, ``unified_pos``) are currently unused by this
    implementation — presumably kept for signature compatibility with sibling
    models; confirm before removing.
    """

    def __init__(
        self,
        space_dim=1,
        n_layers=5,
        n_hidden=256,
        dropout=0,
        n_head=8,
        act="gelu",
        mlp_ratio=1,
        fun_dim=1,
        out_dim=1,
        slice_num=32,
        ref=8,
        unified_pos=False,
        params=None,
    ):
        """Build the U-Net branch, the FVGN branch and the fusion head.

        Args:
            n_hidden: feature width shared by both branches; the fusion head
                consumes the concatenation of the two, i.e. ``2 * n_hidden``.
            out_dim: output dimension of the final projection.
            params: required config object carrying the FVGN hyper-parameters
                (``message_passing_num``, ``*_input_size``, ``*_output_size``).

        Raises:
            ValueError: if ``params`` is None (it is dereferenced immediately,
                so fail fast with a clear message instead of an opaque
                AttributeError).
        """
        super().__init__()

        if params is None:
            raise ValueError("Model requires a non-None `params` configuration object")

        # U-Net branch: voxel grid -> per-query-point features of width n_hidden.
        self.unet_o_dim = n_hidden
        self.unet = UNet3DWithSamplePoints(
            in_channels=1,
            out_channels=self.unet_o_dim,
            hidden_channels=self.unet_o_dim,
            num_levels=4,
        )

        # Graph branch: encoder-processor-decoder over the mesh graph.
        self.fvgn = EncoderProcesserDecoder(
            message_passing_num=params.message_passing_num,
            cell_input_size=params.cell_input_size,
            edge_input_size=params.edge_input_size,
            node_input_size=params.node_input_size,
            cell_output_size=params.cell_output_size,
            edge_output_size=params.edge_output_size,
            node_output_size=params.node_output_size,
            hidden_size=n_hidden,
            params=params,
        )

        # Fusion head: concat(U-Net, FVGN) -> 4x expansion -> out_dim.
        self.last_layer = nn.Sequential(
            nn.Linear(2 * n_hidden, n_hidden * 4),
            nn.GELU(),
            nn.Linear(n_hidden * 4, out_dim),
        )

        self.initialize_weights()

    def initialize_weights(self):
        """Apply `_init_weights` recursively to every submodule."""
        self.apply(self._init_weights)

    def _init_weights(self, m):
        """Per-module init: truncated-normal Linear weights (std=0.02) with
        zero bias; unit weight / zero bias for LayerNorm and BatchNorm1d."""
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=0.02)
            # Linear bias may be disabled (bias=False); only init when present.
            # (Removed a redundant isinstance re-check that was already
            # guaranteed true on this branch.)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.BatchNorm1d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    def forward(
        self,
        x,
        graph_node,
        graph_edge=None,
        graph_cell=None,
        params=None,
        is_training=True,
    ):
        """Fuse U-Net and FVGN features for each query point.

        Args:
            x: unused; kept for interface compatibility with sibling models.
            graph_node: graph object providing ``voxel_left`` (voxel input to
                the U-Net) and ``query`` (sample-point coordinates).
            graph_edge, graph_cell: remaining graph structures for the FVGN.
            params: runtime config forwarded to the FVGN branch.
            is_training: training-mode flag forwarded to both branches.

        Returns:
            Tensor of per-point predictions with last dimension ``out_dim``.
        """
        # Voxel branch sampled at the query points (half precision inside).
        ufeatures = self.unet(
            graph_node.voxel_left,
            graph_node.query,
            graph_node=graph_node,
            graph_cell=graph_cell,
            half=True,
            is_training=is_training,
        )

        # Graph branch features, aligned with the same query points.
        graph_feature = self.fvgn(
            graph_node,
            graph_edge,
            graph_cell,
            params=params,
            is_training=is_training,
        )

        # Concatenate along the feature axis and project to out_dim.
        fx = self.last_layer(torch.cat((ufeatures, graph_feature), dim=-1))

        return fx
