import mindspore as ms
import mindspore.nn as nn
from src.Module.egnn.network import EGNN

# Set global seed for reproducibility
ms.set_seed(12345)

class GNN_model(nn.Cell):
    """Thin wrapper cell that builds and runs a configured GNN backbone.

    Currently only EGNN is supported: the constructor merges command-line
    overrides from ``args`` into ``gnn_config``, derives the input/output
    feature dimensions, and instantiates the backbone. ``construct`` simply
    forwards the batched graph to it.
    """

    def __init__(self, gnn_config, args):
        """Build the wrapped GNN.

        Args:
            gnn_config: dict of backbone hyperparameters (e.g. ``n_layers``,
                ``dropout``, ``problem_type``). It is copied before any
                override is applied, so the caller's dict is never mutated.
            args: namespace-like object; must expose ``gnn`` (backbone name).
                Optional attributes ``layer_num``, ``dropout`` and
                ``problem_type`` override the corresponding config entries;
                ``use_sasa`` / ``use_bfactor`` / ``use_dihedral`` /
                ``use_coordinate`` widen the output dimension.

        Raises:
            KeyError: if ``args.gnn`` does not name a supported backbone
                (i.e. does not contain the substring ``"egnn"``).
        """
        super(GNN_model, self).__init__()
        # Shallow-copy the config: the original code wrote the overrides
        # below straight into the caller's dict, mutating shared state.
        self.gnn_config = dict(gnn_config)
        self.args = args

        # Command-line values take precedence over the config file, but only
        # when the attribute is actually present on args.
        if hasattr(args, "layer_num"):
            self.gnn_config["n_layers"] = args.layer_num
        if hasattr(args, "dropout"):
            self.gnn_config["dropout"] = args.dropout
        if hasattr(args, "problem_type"):
            self.gnn_config["problem_type"] = args.problem_type

        # Derive feature dimensions before constructing the backbone.
        self.out_dim = self._calculate_output_dim()
        self.input_dim = self._calculate_input_dim()

        # Substring match keeps variants like "egnn_v2" routed to EGNN;
        # anything else is an unsupported backbone.
        if "egnn" in args.gnn:
            self.GNN_model = EGNN(self.gnn_config, self.input_dim, self.out_dim)
        else:
            raise KeyError(f"No implementation for GNN type '{args.gnn}'")

    def construct(self, batch_graph):
        """Forward pass: delegate the batched graph to the wrapped backbone."""
        return self.GNN_model(batch_graph)

    def _calculate_input_dim(self):
        """Return the fixed per-node input feature width.

        NOTE(review): hard-coded to 31; presumably the node-feature layout of
        the upstream dataset — confirm against the data pipeline.
        """
        return 31

    def _calculate_output_dim(self):
        """Return the output width: 20 base classes plus optional targets.

        The base 20 presumably corresponds to amino-acid classes — TODO
        confirm. Each enabled auxiliary target appends its own channels:
        SASA (+1), B-factor (+1), dihedrals (+6), coordinates (+3).
        """
        output_dim = 20
        if getattr(self.args, 'use_sasa', False):
            output_dim += 1
        if getattr(self.args, 'use_bfactor', False):
            output_dim += 1
        if getattr(self.args, 'use_dihedral', False):
            output_dim += 6
        if getattr(self.args, 'use_coordinate', False):
            output_dim += 3
        return output_dim
