'''
@Time    : 2022/3/17 14:07
@Author  : Fu Junyu
@Site    : www.fujunyu.cn
@File    : star-gcn-model.py
@Software: PyCharm
'''
import numpy as np
import mxnet as mx
from mxnet.gluon import nn
from mxgraph.graph import HeterGraph, merge_node_ids_dict, set_seed
from mxgraph.layers import HeterGCNLayer, StackedHeterGCNLayers, LayerDictionary, InnerProductLayer
from mxgraph.layers.common import get_activation


class Net(nn.Block):
    """STAR-GCN-style rating-prediction network.

    Stacks one or more heterogeneous GCN encoder blocks over a user-item
    graph and predicts ratings via an inner product of projected user/item
    representations.  When the denoising-autoencoder branch is enabled
    (``_MODEL.USE_DAE``) it additionally reconstructs (possibly masked)
    node embeddings with per-block decoder MLPs, feeding each block's
    reconstruction into the next block (stacked/hourglass structure).

    NOTE(review): behaviour is driven by module-level config objects
    (``_MODEL``, ``_EMBED``, ``_FEA``, ``_GCN``, ``_GEN_RATING``) that are
    defined elsewhere in this file -- confirm they are initialised before
    constructing ``Net``.
    """

    def __init__(self, all_graph, name_user, name_item, **kwargs):
        """Build all sub-layers according to the module-level config.

        Parameters
        ----------
        all_graph : HeterGraph
            The whole heterogeneous graph.  Provides ``.features`` (per-key
            feature matrices), ``.meta_graph`` and
            ``.get_multi_link_structure()``.
        name_user : str
            Key of the user node type in the graph dictionaries.
        name_item : str
            Key of the item node type in the graph dictionaries.
        """
        super(Net, self).__init__(**kwargs)
        self._name_user = name_user
        self._name_item = name_item
        self._act = get_activation(_MODEL.ACTIVATION)
        with self.name_scope():
            # Optional trainable input embeddings, one table per node type.
            if _MODEL.USE_EMBED:
                self.embed_layers = LayerDictionary(prefix='embed_')
                with self.embed_layers.name_scope():
                    for key, fea in all_graph.features.items():
                        # input_dim = number of nodes of this type.
                        self.embed_layers[key] = nn.Embedding(input_dim=fea.shape[0],
                                                              output_dim=_EMBED.UNITS,
                                                              weight_initializer=mx.init.Uniform(0.1),
                                                              prefix='{}_'.format(key))
            # Optional two-layer MLP projecting raw node features.
            if _MODEL.USE_FEA_PROJ:
                self.fea_mappings = LayerDictionary(prefix='fea_map_')
                with self.fea_mappings.name_scope():
                    for key in all_graph.features:
                        self.fea_mappings[key] = nn.HybridSequential()
                        self.fea_mappings[key].add(nn.Dense(units=_FEA.MID_MAP,
                                                          flatten=False, prefix='{}_l0_'.format(key)))
                        self.fea_mappings[key].add(get_activation(_MODEL.ACTIVATION))
                        self.fea_mappings[key].add(nn.Dense(units=_FEA.UNITS,
                                                          flatten=False, prefix='{}_l1_'.format(key)))


            # Construct Encoder
            # One stacked GCN encoder per block; in the recurrent setting a
            # single encoder is shared by all blocks.
            self.encoders = nn.Sequential(prefix='enc_')
            with self.encoders.name_scope():
                num_enc_blocks = 1 if _MODEL.USE_RECURRENT else _MODEL.NBLOCKS
                for block_id in range(num_enc_blocks):
                    recurrent_layer_num = len(_GCN.AGG.UNITS) if _GCN.USE_RECURRENT else None
                    encoder = StackedHeterGCNLayers(recurrent_layer_num=recurrent_layer_num,
                                                    prefix='b{}_'.format(block_id))
                    with encoder.name_scope():
                        for i, (agg_units, out_units) in enumerate(zip(_GCN.AGG.UNITS, _GCN.OUT.UNITS)):
                            if not _GCN.USE_RECURRENT and not _MODEL.USE_DAE\
                                    and (i == len(_GCN.AGG.UNITS) - 1):
                                # Last layer without link-prediction training only
                                # needs outputs for the user/item node types.
                                source_keys = [name_user, name_item]
                            else:
                                source_keys = all_graph.meta_graph.keys()

                            encoder.add(HeterGCNLayer(meta_graph=all_graph.meta_graph,
                                                      multi_link_structure=all_graph.get_multi_link_structure(),
                                                      dropout_rate=_GCN.DROPOUT,
                                                      agg_units=agg_units,
                                                      out_units=out_units,
                                                      source_keys=source_keys,
                                                      agg_accum=_GCN.AGG.ACCUM,
                                                      agg_act=_MODEL.ACTIVATION,
                                                      out_act=_MODEL.ACTIVATION,
                                                      prefix='l{}_'.format(i)))
                            if _GCN.USE_RECURRENT:
                                # In the recurrent formula, we will only create one layer
                                break
                    self.encoders.add(encoder)

            # Construct Decoder
            # Per-block, per-node-type two-layer MLPs that map encoder
            # outputs back to the (embedding [+ feature]) space.
            if _MODEL.USE_DAE:
                num_dec_blocks = 1 if _MODEL.USE_RECURRENT else _MODEL.NBLOCKS
                # Generate the embed_map
                self.embed_maps = nn.Sequential(prefix='embed_maps_')
                with self.embed_maps.name_scope():
                    # Reconstruction target includes projected features only
                    # when both flags are set (mirrors gt_embeddings in forward).
                    if _MODEL.USE_FEA_PROJ and _MODEL.RECON_FEA:
                        out_emb_units = _EMBED.UNITS + _FEA.UNITS
                    else:
                        out_emb_units = _EMBED.UNITS
                    for block_id in range(num_dec_blocks):
                        embed_map = LayerDictionary(prefix='b{}_'.format(block_id))
                        with embed_map.name_scope():
                            for key in all_graph.meta_graph:
                                embed_map[key] = nn.HybridSequential(prefix='{}_'.format(key))
                                with embed_map[key].name_scope():
                                    embed_map[key].add(nn.Dense(units=out_emb_units, flatten=False,
                                                                prefix='{}_l0_'.format(key)))
                                    embed_map[key].add(get_activation(_MODEL.ACTIVATION))
                                    embed_map[key].add(nn.Dense(units=out_emb_units, flatten=False,
                                                                prefix='{}_l1_'.format(key)))
                        self.embed_maps.add(embed_map)


            # Per-block linear projections applied to user/item encoder
            # outputs before the rating inner product.
            self.rating_user_projs = nn.Sequential(prefix='rating_user_proj_')
            self.rating_item_projs = nn.Sequential(prefix='rating_item_proj_')
            for rating_proj in [self.rating_user_projs, self.rating_item_projs]:
                with rating_proj.name_scope():
                    num_blocks = 1 if _MODEL.USE_RECURRENT else _MODEL.NBLOCKS
                    for block_id in range(num_blocks):
                        ele_proj = nn.HybridSequential(prefix='b{}_'.format(block_id))
                        with ele_proj.name_scope():
                            ele_proj.add(nn.Dense(units=_GEN_RATING.MID_MAP,
                                                  flatten=False))
                        rating_proj.add(ele_proj)

            # NOTE(review): prefix lacks the trailing '_' used by every other
            # sub-block ('enc_', 'embed_', ...) -- verify this is intentional,
            # as it affects the generated parameter names.
            self.gen_ratings = InnerProductLayer(prefix='gen_rating')


    def get_embed(self, ctx, node_ids_dict, embed_noise_dict=None, use_mask=True):
        """ Generate the embedding of the nodes

        Parameters
        ----------
        ctx : mx.Context
            Device on which the index arrays are created.
        node_ids_dict : dict
            Dictionary that contains the ids of the nodes that need to be embedded
            Inner values should be mx.nd.ndarrays
        embed_noise_dict : dict
            Dictionary that contains the noise information, indexed by node id.
            There are two possible values:
                -1 --> mask to zero
                i --> use the embedding vector as in the ith-node
            Inner values should be mx.nd.ndarrays.
            Only read when ``use_mask`` is True.
        use_mask : bool
            Whether to mask the embeddings

        Returns
        -------
        embedding_dict : dict
            Dictionary that contains the node embeddings
        """
        assert _MODEL.USE_EMBED
        embedding_dict = dict()
        for key, node_ids in node_ids_dict.items():
            node_ids = mx.nd.array(node_ids, ctx=ctx, dtype=np.int32)
            if use_mask:
                # Remap each id through the noise table; -1 marks a masked node.
                node_ids = mx.nd.take(embed_noise_dict[key], node_ids)
                mask = (node_ids != -1)
                # Multiplying by the boolean mask turns -1 into the valid
                # index 0 so the Embedding lookup does not fail; the masked
                # rows are zeroed right after the lookup below.
                node_ids = node_ids * mask
            embedding = self.embed_layers[key](node_ids)
            if use_mask:
                embedding = embedding * mx.nd.reshape(mask, shape=(-1, 1)).astype(np.float32)
            embedding_dict[key] = embedding
        return embedding_dict

    def get_feature(self, ctx, node_ids_dict, feature_dict):
        """Project the raw features of the selected nodes.

        Parameters
        ----------
        ctx : mx.Context
            Device on which the index arrays are created.
        node_ids_dict : dict
            {node key: ids of the nodes whose features are needed}
        feature_dict : dict
            {node key: full feature matrix for all nodes of that type}

        Returns
        -------
        out_fea_dict : dict
            {node key: projected features of the selected nodes}
        """
        assert _MODEL.USE_FEA_PROJ
        out_fea_dict = dict()
        for key, node_ids in node_ids_dict.items():
            out_fea_dict[key] = self.fea_mappings[key](mx.nd.take(feature_dict[key],
                                                                  mx.nd.array(node_ids, ctx=ctx, dtype=np.int32)))
        return out_fea_dict


    def forward(self, graph, feature_dict, rating_node_pairs=None,
                embed_noise_dict=None, recon_node_ids_dict=None, graph_sampler_args=None, symm=None):
        """Run the stacked encoder/decoder and produce ratings/reconstructions.

        At least one of ``rating_node_pairs`` and ``recon_node_ids_dict``
        must be given (otherwise NotImplementedError is raised).

        Parameters
        ----------
        graph : HeterGraph
        feature_dict : dict
            Dictionary contains the base features of all nodes
        rating_node_pairs : np.ndarray or None
            Shape: (2, #Edges), First row is user and the second row is item
        embed_noise_dict : dict or None
            Dictionary that contains the noises of all nodes that is used to replace the node ids for masked embedding
            {key: (#all node ids, ) the shape and order is the same as the node ids in the whole graph}
        recon_node_ids_dict: dict or None
            Dictionary that contains the nodes ids that we need to reconstruct the embedding
        graph_sampler_args : dict or None
            Arguments for graph sampler
        symm : bool
            Whether to calculate the support in the symmetric formula.
            Defaults to ``_GCN.AGG.NORM_SYMM`` when None.

        Returns
        -------
        pred_ratings : list of mx.nd.ndarray
            The predicted ratings. If we use the stacked hourglass AE structure.
             it will return a list with multiple predicted ratings
        pred_embeddings : list of dict
            The predicted embeddings. Return a list of predicted embeddings
             if we use the stacked hourglass AE structure.
        gt_embeddings : dict
            The ground-truth embedding of the target node ids.
            Empty dict when ``recon_node_ids_dict`` is None.
        """
        if symm is None:
            symm = _GCN.AGG.NORM_SYMM
        # Infer the device from any feature array.
        ctx = next(iter(feature_dict.values())).context
        req_node_ids_dict = dict()
        # Per-block sampling plans and index mappings, filled in backwards below.
        encoder_fwd_plan = [None for _ in range(_MODEL.NBLOCKS)]
        encoder_fwd_indices = [None for _ in range(_MODEL.NBLOCKS)]
        pred_ratings = []
        pred_embeddings = []
        block_req_node_ids_dict = [None for _ in range(_MODEL.NBLOCKS)]
        if embed_noise_dict is not None:
            nd_embed_noise_dict = {key: mx.nd.array(ele, ctx=ctx, dtype=np.int32)
                                   for key, ele in embed_noise_dict.items()}
        else:
            nd_embed_noise_dict = None
        if recon_node_ids_dict is not None:
            # Ground-truth (unmasked) embeddings of the reconstruction targets.
            gt_embeddings = self.get_embed(ctx=ctx,
                                           node_ids_dict=recon_node_ids_dict,
                                           embed_noise_dict=nd_embed_noise_dict,
                                           use_mask=False)
            if _MODEL.USE_FEA_PROJ and _MODEL.RECON_FEA:
                gt_fea = self.get_feature(ctx=ctx,
                                          node_ids_dict=recon_node_ids_dict,
                                          feature_dict=feature_dict)
                for key in gt_embeddings:
                    gt_embeddings[key] = mx.nd.concat(gt_embeddings[key], gt_fea[key])
        else:
            gt_embeddings = dict()
        # From top to bottom, generate the forwarding plan
        for block_id in range(_MODEL.NBLOCKS - 1, -1, -1):
            # Backtrack the encoders
            encoder = self.encoders[0] if _MODEL.USE_RECURRENT else self.encoders[block_id]
            # Merge the node ids needed by ratings/reconstruction/the block
            # above into one unique set; keep the index mapping to split the
            # encoder output back apart in the forward pass below.
            if rating_node_pairs is not None and recon_node_ids_dict is not None:
                uniq_node_ids_dict, encoder_fwd_indices[block_id] = \
                    merge_node_ids_dict([{self._name_user : rating_node_pairs[0],
                                          self._name_item: rating_node_pairs[1]},
                                         recon_node_ids_dict,
                                         req_node_ids_dict])
            elif rating_node_pairs is not None and recon_node_ids_dict is None:
                uniq_node_ids_dict, encoder_fwd_indices[block_id] = \
                    merge_node_ids_dict([{self._name_user: rating_node_pairs[0],
                                          self._name_item: rating_node_pairs[1]},
                                         req_node_ids_dict])
            elif rating_node_pairs is None and recon_node_ids_dict is not None:
                uniq_node_ids_dict, encoder_fwd_indices[block_id] = \
                    merge_node_ids_dict([recon_node_ids_dict, req_node_ids_dict])
            else:
                raise NotImplementedError
            block_req_node_ids_dict[block_id] = req_node_ids_dict
            # gen_plan returns the node ids this block needs as input
            # (consumed by the block below / the initial embedding lookup).
            req_node_ids_dict, encoder_fwd_plan[block_id]\
                = encoder.gen_plan(graph=graph,
                                   sel_node_ids_dict=uniq_node_ids_dict,
                                   graph_sampler_args=graph_sampler_args,
                                   symm=symm)

        # From bottom to top, calculate the forwarding results
        if _MODEL.USE_EMBED:
            # Mask embeddings only when noise was supplied.
            input_dict = self.get_embed(ctx=ctx,
                                        node_ids_dict=req_node_ids_dict,
                                        embed_noise_dict=nd_embed_noise_dict,
                                        use_mask=embed_noise_dict is not None)
        if _MODEL.USE_FEA_PROJ:
            fea_dict = self.get_feature(ctx=ctx,
                                        node_ids_dict=req_node_ids_dict,
                                        feature_dict=feature_dict)
            if _MODEL.USE_EMBED:
                for key in input_dict:
                    input_dict[key] = mx.nd.concat(input_dict[key], fea_dict[key], dim=-1)
            else:
                input_dict = fea_dict

        for block_id in range(_MODEL.NBLOCKS):
            encoder = self.encoders[0] if _MODEL.USE_RECURRENT else self.encoders[block_id]
            output_dict = encoder.heter_sage(input_dict, encoder_fwd_plan[block_id])
            # Unpack the index mapping in the same order the id dicts were
            # merged in the planning loop above.
            if rating_node_pairs is not None and recon_node_ids_dict is not None:
                rating_idx_dict, recon_idx_dict, req_idx_dict = encoder_fwd_indices[block_id]
            elif rating_node_pairs is not None and recon_node_ids_dict is None:
                rating_idx_dict, req_idx_dict = encoder_fwd_indices[block_id]
            elif rating_node_pairs is None and recon_node_ids_dict is not None:
                recon_idx_dict, req_idx_dict = encoder_fwd_indices[block_id]
            else:
                raise NotImplementedError

            # Generate the predicted ratings
            if rating_node_pairs is not None:
                rating_user_fea = mx.nd.take(output_dict[self._name_user],
                                             mx.nd.array(rating_idx_dict[self._name_user], ctx=ctx, dtype=np.int32))
                rating_item_fea = mx.nd.take(output_dict[self._name_item],
                                             mx.nd.array(rating_idx_dict[self._name_item], ctx=ctx, dtype=np.int32))
                user_proj = self.rating_user_projs[0] if _MODEL.USE_RECURRENT else self.rating_user_projs[block_id]
                item_proj = self.rating_item_projs[0] if _MODEL.USE_RECURRENT else self.rating_item_projs[block_id]
                rating_user_fea = user_proj(rating_user_fea)
                rating_item_fea = item_proj(rating_item_fea)
                block_pred_ratings = self.gen_ratings(rating_user_fea, rating_item_fea)
                pred_ratings.append(block_pred_ratings)

            # Decoder
            if recon_node_ids_dict is not None:
                embed_map = self.embed_maps[0] if _MODEL.USE_RECURRENT else self.embed_maps[block_id]
                # Generate the predicted embeddings
                block_pred_embeddings = dict()
                for key, idx in recon_idx_dict.items():
                    block_pred_embeddings[key] = \
                        embed_map[key](mx.nd.take(output_dict[key], mx.nd.array(idx, ctx=ctx, dtype=np.int32)))
                pred_embeddings.append(block_pred_embeddings)
            if block_id < _MODEL.NBLOCKS - 1 and _MODEL.USE_DAE:
                # Generate the Input Embeddings of the next layer
                embed_map = self.embed_maps[0] if _MODEL.USE_RECURRENT else self.embed_maps[block_id]
                input_dict = dict()
                for key, idx in req_idx_dict.items():
                    input_dict[key] = embed_map[key](mx.nd.take(output_dict[key], mx.nd.array(idx, ctx=ctx, dtype=np.int32)))
                if _MODEL.USE_FEA_PROJ and not _MODEL.RECON_FEA:
                    # Features were not reconstructed, so re-attach the
                    # projected raw features for the next block's input.
                    fea_dict = self.get_feature(ctx=ctx, node_ids_dict=block_req_node_ids_dict[block_id],
                                                feature_dict=feature_dict)
                    for key in input_dict:
                        input_dict[key] = mx.nd.concat(input_dict[key], fea_dict[key], dim=-1)

        return pred_ratings, pred_embeddings, gt_embeddings

