from typing import Union, List

import tensorflow as tf
from numpy import ndarray
from scipy.sparse import spmatrix
from tensorflow.keras import activations, initializers, constraints
from tensorflow.keras import regularizers
from tensorflow.keras.layers import Layer

from tensor_utils import to_dense_tensor, save_dense_in_sparse_json

# Module-wide alias so the type annotations below can say `tensor`
# instead of spelling out `tf.Tensor` everywhere.
tensor = tf.Tensor


def get_tensor(mat: Union[tensor, ndarray, spmatrix, dict]):
    """Coerce *mat* into a float32 dense ``tf.Tensor``.

    Tensors are merely cast; every other accepted representation
    (ndarray, scipy sparse matrix, serialized sparse dict) is densified
    via the project helper ``to_dense_tensor``.
    """
    if not isinstance(mat, tensor):
        return to_dense_tensor(mat, dtype=tf.float32)
    return tf.cast(mat, dtype=tf.float32)


class GraphConvolution(Layer):
    """Graph convolution layer supporting one or more stacked propagation steps.

    Each step concatenates a degree-scaled "self" signal with the neighbour
    aggregation ``graph @ features`` along the feature axis, projects the
    result through a kernel of shape ``(2 * in_dim, out_dim)``, then applies
    the optional bias, the activation, and a row-wise l2 normalization.

    Inputs are expected with shape ``(1, num_nodes, num_features)``; the
    output keeps the leading singleton batch axis.
    """

    def __init__(self, units: Union[int, List[int]], graph: Union[tensor, ndarray, spmatrix, dict],
                 activation=None,
                 use_bias=False,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        """Create the layer.

        Args:
            units: Output feature size. A list builds one propagation step per
                entry, chained in order (a small stacked GCN in one layer).
            graph: Square adjacency matrix (``num_nodes x num_nodes``) in any
                representation accepted by ``get_tensor``; densified to float32.
            activation / use_bias / *_initializer / *_regularizer /
            *_constraint: standard Keras ``Dense``-style knobs, applied to
                every step.
        """
        # Mirror keras.layers.Dense: accept the legacy `input_dim` alias
        # for a 1-D `input_shape`.
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
        super(GraphConvolution, self).__init__(**kwargs)
        self.units = units
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.kernel = None  # list of per-step weight matrices (set in build)
        self.bias = None    # list of per-step bias vectors, or None
        self.degree = None  # l2-normalized node-degree vector (set in build)
        self.graph = get_tensor(graph)

    def compute_output_shape(self, input_shape):
        assert input_shape and len(input_shape) == 3
        assert input_shape[-1]
        output_shapes = list(input_shape)
        # Only the feature axis changes; the last step decides its size.
        output_shapes[-1] = self.units[-1] if isinstance(self.units, list) else self.units
        return tuple(output_shapes)

    def build(self, input_shape):
        assert len(input_shape) == 3
        # The node axis must match the (square) adjacency matrix.
        assert input_shape[1] == self.graph.shape[0] and self.graph.shape[0] == self.graph.shape[1]
        input_dim = input_shape[-1]

        # Per-step dimension chain: input_dim -> units[0] -> ... -> units[-1].
        if isinstance(self.units, list):
            # BUG FIX: was `input_dim + self.units`, which raises
            # TypeError (int + list). The input dimension must be
            # prepended as a one-element list.
            dims = [input_dim] + self.units
        else:
            dims = [input_dim, self.units]
        # Each kernel consumes the concatenation [self part, neighbour part],
        # hence the doubled input dimension.
        self.kernel = [self.add_weight(shape=(dims[i - 1] * 2, dims[i]),
                                       initializer=self.kernel_initializer,
                                       name='kernel_{}'.format(i),
                                       regularizer=self.kernel_regularizer,
                                       constraint=self.kernel_constraint)
                       for i in range(1, len(dims))]

        if self.use_bias:
            self.bias = [self.add_weight(shape=(dims[i],),
                                         initializer=self.bias_initializer,
                                         name='bias_{}'.format(i),
                                         regularizer=self.bias_regularizer,
                                         constraint=self.bias_constraint) for i in range(1, len(dims))]
        else:
            self.bias = None
        # Column sums give node degrees; normalize once — the graph is static.
        self.degree = tf.linalg.l2_normalize(tf.reduce_sum(self.graph, axis=0))
        super(GraphConvolution, self).build(input_shape)
        self.built = True

    def call(self, inputs, **kwargs):
        feature_vectors = inputs[0]  # drop the leading singleton batch axis
        for step, weight in enumerate(self.kernel):
            # Degree-scaled self signal plus plain neighbour aggregation,
            # concatenated along the feature axis.
            main_part = tf.multiply(tf.expand_dims(self.degree, 1), feature_vectors)
            neighbor_part = tf.matmul(self.graph, feature_vectors)
            whole = tf.concat([main_part, neighbor_part], axis=1)
            feature_vectors = tf.matmul(whole, weight)
            if self.use_bias:
                feature_vectors += self.bias[step]
            if self.activation is not None:
                feature_vectors = self.activation(feature_vectors)
            feature_vectors = tf.linalg.l2_normalize(feature_vectors, axis=1)
        return tf.expand_dims(feature_vectors, 0)

    def get_config(self):
        """Serialize the layer config; the graph is stored in sparse-JSON form."""
        config = {'units': self.units,
                  'activation': activations.serialize(self.activation),
                  'use_bias': self.use_bias,
                  'kernel_initializer': initializers.serialize(
                      self.kernel_initializer),
                  'bias_initializer': initializers.serialize(
                      self.bias_initializer),
                  'kernel_regularizer': regularizers.serialize(
                      self.kernel_regularizer),
                  'bias_regularizer': regularizers.serialize(
                      self.bias_regularizer),
                  'activity_regularizer': regularizers.serialize(
                      self.activity_regularizer),
                  'kernel_constraint': constraints.serialize(
                      self.kernel_constraint),
                  'bias_constraint': constraints.serialize(self.bias_constraint),
                  'graph': save_dense_in_sparse_json(self.graph)}

        base_config = super(GraphConvolution, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))


class AsymmetricGraphConvolution(Layer):
    """Single-step graph convolution over a possibly non-square adjacency.

    Node features are scaled by the (l2-normalized) column-sum degrees,
    aggregated through ``graph``, projected by a dense kernel, and finally
    l2-normalized row-wise. Input shape is ``(1, num_nodes, num_features)``;
    the singleton batch axis is preserved on output.
    """

    def __init__(self, units: int, graph: Union[ndarray, tensor],
                 activation=None,
                 use_bias=False,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        """Store the Keras-style configuration and densify *graph* to float32."""
        # Translate the legacy `input_dim` argument into `input_shape`.
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
        super(AsymmetricGraphConvolution, self).__init__(**kwargs)
        self.units = units
        self.use_bias = use_bias
        self.activation = activations.get(activation)
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_initializer = initializers.get(bias_initializer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.bias_constraint = constraints.get(bias_constraint)
        self.activity_regularizer = regularizers.get(activity_regularizer)
        self.kernel = None  # created in build()
        self.bias = None    # created in build() when use_bias
        self.degree = None  # created in build()
        self.graph = get_tensor(graph)

    def compute_output_shape(self, input_shape):
        assert input_shape and len(input_shape) == 3
        assert input_shape[-1]
        # Only the trailing feature dimension changes.
        shape = list(input_shape)
        shape[-1] = self.units
        return tuple(shape)

    def build(self, input_shape):
        assert len(input_shape) == 3
        # The input node count must match the adjacency's column count
        # (rows may differ — the graph may be rectangular).
        assert input_shape[1] == self.graph.shape[1]
        feature_dim = input_shape[-1]

        self.kernel = self.add_weight(name='kernel',
                                      shape=(feature_dim, self.units),
                                      initializer=self.kernel_initializer,
                                      regularizer=self.kernel_regularizer,
                                      constraint=self.kernel_constraint)

        self.bias = None
        if self.use_bias:
            self.bias = self.add_weight(name='bias',
                                        shape=(self.units,),
                                        initializer=self.bias_initializer,
                                        regularizer=self.bias_regularizer,
                                        constraint=self.bias_constraint)
        # Column-sum degrees, l2-normalized once — the graph is fixed.
        self.degree = tf.linalg.l2_normalize(tf.reduce_sum(self.graph, axis=0))
        super(AsymmetricGraphConvolution, self).build(input_shape)
        self.built = True

    def call(self, inputs, **kwargs):
        nodes = inputs[0]  # strip the singleton batch axis
        scaled = tf.multiply(tf.expand_dims(self.degree, 1), nodes)
        aggregated = tf.matmul(self.graph, scaled)
        projected = tf.matmul(aggregated, self.kernel)
        if self.use_bias:
            projected = projected + self.bias
        if self.activation is not None:
            projected = self.activation(projected)
        normalized = tf.linalg.l2_normalize(projected, axis=1)

        return tf.expand_dims(normalized, 0)

    def get_config(self):
        """Return the serializable config; the graph is saved in sparse-JSON form."""
        merged = dict(super(AsymmetricGraphConvolution, self).get_config())
        merged.update({
            'units': self.units,
            'activation': activations.serialize(self.activation),
            'use_bias': self.use_bias,
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'bias_initializer': initializers.serialize(self.bias_initializer),
            'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
            'bias_regularizer': regularizers.serialize(self.bias_regularizer),
            'activity_regularizer': regularizers.serialize(self.activity_regularizer),
            'kernel_constraint': constraints.serialize(self.kernel_constraint),
            'bias_constraint': constraints.serialize(self.bias_constraint),
            'graph': save_dense_in_sparse_json(self.graph),
        })
        return merged


class DynamicGraphConvolution(Layer):
    """Graph convolution whose adjacency can be swapped between calls.

    Unlike ``GraphConvolution``, the degree vector is recomputed on every
    forward pass so that ``update_graph`` takes effect immediately. A single
    propagation step concatenates the degree-scaled self signal with the
    neighbour aggregation and projects through a ``(2 * in_dim, units)``
    kernel. Input shape is ``(1, num_nodes, num_features)``.
    """

    def __init__(self, units: int, graph: Union[ndarray, tensor],
                 activation=None,
                 use_bias=False,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        """Store the Keras-style configuration and densify *graph* to float32."""
        # Translate the legacy `input_dim` argument into `input_shape`.
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
        super(DynamicGraphConvolution, self).__init__(**kwargs)
        self.units = units
        self.use_bias = use_bias
        self.activation = activations.get(activation)
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_initializer = initializers.get(bias_initializer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.bias_constraint = constraints.get(bias_constraint)
        self.activity_regularizer = regularizers.get(activity_regularizer)
        self.kernel = None  # created in build()
        self.bias = None    # created in build() when use_bias
        self.graph = get_tensor(graph)

    def compute_output_shape(self, input_shape):
        assert input_shape and len(input_shape) == 3
        assert input_shape[-1]
        # Only the trailing feature dimension changes.
        shape = list(input_shape)
        shape[-1] = self.units
        return tuple(shape)

    def build(self, input_shape):
        assert len(input_shape) == 3
        # Node axis must match the adjacency's row count.
        assert input_shape[1] == self.graph.shape[0]
        feature_dim = input_shape[-1]

        # Doubled input rows: the kernel consumes [self part, neighbour part].
        self.kernel = self.add_weight(name='kernel',
                                      shape=(feature_dim * 2, self.units),
                                      initializer=self.kernel_initializer,
                                      regularizer=self.kernel_regularizer,
                                      constraint=self.kernel_constraint)

        self.bias = None
        if self.use_bias:
            self.bias = self.add_weight(name='bias',
                                        shape=(self.units,),
                                        initializer=self.bias_initializer,
                                        regularizer=self.bias_regularizer,
                                        constraint=self.bias_constraint)
        super(DynamicGraphConvolution, self).build(input_shape)
        self.built = True

    def call(self, inputs, **kwargs):
        # Degrees are derived per call because the graph may have changed.
        degree = tf.linalg.l2_normalize(tf.reduce_sum(self.graph, axis=0))
        nodes = inputs[0]  # strip the singleton batch axis
        self_signal = tf.multiply(tf.expand_dims(degree, 1), nodes)
        neighbor_signal = tf.matmul(self.graph, nodes)
        combined = tf.concat([self_signal, neighbor_signal], axis=1)
        projected = tf.matmul(combined, self.kernel)
        if self.use_bias:
            projected = projected + self.bias
        if self.activation is not None:
            projected = self.activation(projected)
        normalized = tf.linalg.l2_normalize(projected, axis=1)

        return tf.expand_dims(normalized, 0)

    def update_graph(self, graph):
        """Replace the adjacency; takes effect on the next forward pass."""
        self.graph = get_tensor(graph)

    def get_config(self):
        """Return the serializable config; the graph is saved in sparse-JSON form."""
        merged = dict(super(DynamicGraphConvolution, self).get_config())
        merged.update({
            'units': self.units,
            'activation': activations.serialize(self.activation),
            'use_bias': self.use_bias,
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'bias_initializer': initializers.serialize(self.bias_initializer),
            'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
            'bias_regularizer': regularizers.serialize(self.bias_regularizer),
            'activity_regularizer': regularizers.serialize(self.activity_regularizer),
            'kernel_constraint': constraints.serialize(self.kernel_constraint),
            'bias_constraint': constraints.serialize(self.bias_constraint),
            'graph': save_dense_in_sparse_json(self.graph),
        })
        return merged
