# -*- coding: utf-8 -*-
# @Time    : 2018/3/31 19:27
# @Author  : Tianchiyue
# @File    : layers.py
# @Software: PyCharm Community Edition
from keras import backend as K
from keras.engine import Layer
import tensorflow as tf
from keras import initializers, activations


class ConnectAspectLayer(Layer):
    """Concatenate every sentence time step with the aspect embedding.

    The aspect vector is tiled along the time axis, zeroed at padded
    positions using the sentence mask, then concatenated feature-wise
    onto the sentence tensor.

    # Input shape
        list of two tensors:
        sentence: (batch_size, time_steps, embedding_dims)
        aspect:   (batch_size, 1, embedding_dims)
    # Output shape
        (batch_size, time_steps, embedding_dims * 2)
    """
    def __init__(self, **kwargs):
        super(ConnectAspectLayer, self).__init__(**kwargs)
        # Must be set AFTER super().__init__(), which resets the flag to
        # False.  The canonical Keras attribute is `supports_masking`;
        # the old misspelled `support_mask` was never read by Keras.
        self.supports_masking = True

    def call(self, x, mask=None):
        sentence = x[0]       # (batch, time_steps, dims), e.g. 32x24x100
        aspect_vector = x[1]  # (batch, 1, dims), e.g. 32x1x100
        # Tile the aspect vector across every time step of the sentence.
        time_steps = K.int_shape(sentence)[1]
        aspect_vector = K.repeat_elements(aspect_vector, time_steps, axis=1)
        sentence_mask = mask[0] if mask is not None else None
        if sentence_mask is not None:
            # Zero the aspect at padded positions.  K.cast is
            # backend-agnostic; tf.to_float is deprecated in TF.
            aspect_vector = aspect_vector * K.cast(
                K.expand_dims(sentence_mask, 2), K.floatx())
        return K.concatenate([sentence, aspect_vector])

    def compute_output_shape(self, input_shape):
        # Feature dims of sentence and aspect are concatenated.
        return (input_shape[0][0], input_shape[0][1],
                input_shape[0][2] + input_shape[1][2])

    def compute_mask(self, x, mask=None):
        # Propagate the sentence mask; the aspect input carries none.
        return mask[0] if mask else None


class AttentionLayer(Layer):
    """Masked attention of a sequence against a target vector.

    Scores each time step of the first input against the second input
    using either a bilinear form (h W t) or a plain dot product (h t),
    then applies a masked softmax over the time axis.

    # Input shape
        list of two tensors:
        h: (batch_size, time_steps, hidden_dims)
        t: (batch_size, hidden_dims)  # presumably -- verify against caller
    # Output shape
        (batch_size, time_steps) attention weights
    """
    def __init__(self,
                 activation='tanh',
                 use_bias=False,
                 match_func='bilinear',
                 **kwargs):
        # Validate eagerly at construction rather than asserting in call().
        if match_func not in ('bilinear', 'dot'):
            raise ValueError(
                "match_func must be 'bilinear' or 'dot', got %r" % (match_func,))
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.match_func = match_func
        super(AttentionLayer, self).__init__(**kwargs)
        # Must be set AFTER super().__init__(), which resets the flag to
        # False.  The canonical Keras attribute is `supports_masking`;
        # the old misspelled `support_mask` was never read by Keras.
        self.supports_masking = True

    def build(self, input_shape):
        # NOTE(review): W is only used by the 'bilinear' match_func but is
        # created unconditionally; kept that way for weight-file
        # compatibility with previously saved models.
        self.W = self.add_weight(name='kernel',
                                 shape=(input_shape[0][2], input_shape[0][2]),
                                 initializer=initializers.RandomUniform(minval=-0.1, maxval=0.1),
                                 trainable=True)
        if self.use_bias:
            self.bias = self.add_weight(name='bias',
                                        shape=(1,),
                                        initializer=initializers.RandomUniform(minval=-0.1, maxval=0.1),
                                        trainable=True)
        else:
            self.bias = None
        super(AttentionLayer, self).build(input_shape)

    def call(self, x, mask=None):
        h, t = x[0], x[1]
        if self.match_func == 'bilinear':
            # score_i = h_i W t
            hw = K.dot(h, self.W)
            output = K.batch_dot(hw, t, axes=[2, 1])
        else:  # 'dot': score_i = h_i . t
            output = K.batch_dot(h, t, axes=[2, 1])
        # todo match_func concat: v tanh(w[h;t])
        if self.use_bias:
            output = K.bias_add(output, self.bias)
        if self.activation is not None:
            output = self.activation(output)
        sentence_mask = mask[0] if mask is not None else None
        return self.softmask(output, sentence_mask)

    def compute_output_shape(self, input_shape):
        # One attention weight per time step.
        return (input_shape[0][0], input_shape[0][1])

    def compute_mask(self, x, mask=None):
        # Propagate the mask of the sequence input.
        return mask[0] if mask else None

    def softmask(self, x, mask, axis=-1):
        """Softmax along `axis` that ignores masked-out positions.

        :param x: score tensor, (batch_size, time_steps)
        :param mask: boolean mask, (batch_size, time_steps), or None
        :param axis: axis to normalise over
        :return: attention weights summing to ~1 over unmasked steps
        """
        y = K.exp(x)
        if mask is not None:
            # K.cast is backend-agnostic; tf.to_float is deprecated.
            y = y * K.cast(mask, K.floatx())
        # Epsilon keeps fully-masked rows from dividing by zero.
        total = K.sum(y, axis=axis, keepdims=True) + 1e-6
        # The ratio is already non-negative, so no ReLU clamp is needed.
        return y / total


class LocationAttentionLayer(Layer):
    """Self-attention: score each time step with a learned vector.

    Projects every hidden state to a scalar via a learned (hidden_dims, 1)
    weight, then applies a masked softmax over the time axis.

    # Input shape
        (batch_size, time_steps, hidden_dims)
    # Output shape
        (batch_size, time_steps) attention weights
    """
    def __init__(self,
                 activation=None,
                 use_bias=True,
                 **kwargs):
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        super(LocationAttentionLayer, self).__init__(**kwargs)
        # Must be set AFTER super().__init__(), which resets the flag to
        # False.  The canonical Keras attribute is `supports_masking`;
        # the old misspelled `support_mask` was never read by Keras.
        self.supports_masking = True

    def build(self, input_shape):
        # (hidden_dims, 1) projection: one scalar score per time step.
        self.W = self.add_weight(name='kernel',
                                 shape=(input_shape[2], 1),
                                 initializer=initializers.RandomUniform(minval=-0.1, maxval=0.1),
                                 trainable=True)
        if self.use_bias:
            self.bias = self.add_weight(name='bias',
                                        shape=(1,),
                                        initializer=initializers.RandomUniform(minval=-0.1, maxval=0.1),
                                        trainable=True)
        else:
            self.bias = None
        super(LocationAttentionLayer, self).build(input_shape)

    def call(self, x, mask=None):
        output = K.dot(x, self.W)  # (batch, time_steps, 1)
        # todo match_func concat: v tanh(w[h;t])
        if self.use_bias:
            output = K.bias_add(output, self.bias)
        output = K.batch_flatten(output)  # (batch, time_steps)
        if self.activation is not None:
            output = self.activation(output)
        return self.softmask(output, mask)

    def compute_output_shape(self, input_shape):
        # Drop the feature axis: one weight per time step.
        return (input_shape[0], input_shape[1])

    def compute_mask(self, x, mask=None):
        # Pass the incoming mask straight through.
        return mask

    def softmask(self, x, mask, axis=-1):
        """Softmax along `axis` that ignores masked-out positions.

        :param x: score tensor, (batch_size, time_steps)
        :param mask: boolean mask, (batch_size, time_steps), or None
        :param axis: axis to normalise over
        :return: attention weights summing to ~1 over unmasked steps
        """
        y = K.exp(x)
        if mask is not None:
            # K.cast is backend-agnostic; tf.to_float is deprecated.
            y = y * K.cast(mask, K.floatx())
        # Epsilon keeps fully-masked rows from dividing by zero.
        total = K.sum(y, axis=axis, keepdims=True) + 1e-6
        # The ratio is already non-negative, so no ReLU clamp is needed.
        return y / total


class ClearMaskLayer(Layer):
    """Identity layer that drops the incoming Keras mask.

    Place it after mask-supporting layers to strip the mask before
    layers (e.g. a final softmax) that must not receive one.  The data
    tensor itself passes through untouched.
    """

    def __init__(self, **kwargs):
        super(ClearMaskLayer, self).__init__(**kwargs)
        # Set AFTER super().__init__(): the base Layer constructor resets
        # supports_masking to False, so the original pre-super assignment
        # was silently clobbered.
        self.supports_masking = True

    def compute_output_shape(self, input_shape):
        # Pure pass-through: only the mask is dropped, not the data.
        return input_shape

    def compute_mask(self, x, mask=None):
        # Swallow whatever mask arrives.
        return None
