import keras.backend as K
from keras.layers import Layer


class Attention(Layer):
	"""Additive (Bahdanau-style) attention pooling layer.

	Collapses a sequence of feature vectors with shape
	(batch, timesteps, features) into a single weighted average of shape
	(batch, features). Per-timestep weights come from a small learned
	tanh scoring network followed by a softmax over the time axis.
	"""
	
	def __init__(self, attention_size=128, **kwargs):
		# attention_size: width of the hidden scoring projection.
		self.attention_size = attention_size
		super().__init__(**kwargs)
	
	# input_shape: (batch, timesteps, features), e.g. (None, 10, 128)
	def build(self, input_shape):
		# w projects each timestep's feature vector into the scoring space.
		self.w = self.add_weight(name="w_{:s}".format(self.name), shape=(input_shape[-1], self.attention_size),
		                         initializer="glorot_normal", trainable=True)
		# b is a single scalar bias, broadcast over timesteps and units.
		self.b = self.add_weight(name="b_{:s}".format(self.name), shape=(1, 1), initializer="zeros", trainable=True)
		# u is the context vector that reduces each score vector to a scalar.
		self.u = self.add_weight(name="u_{:s}".format(self.name), shape=(self.attention_size, 1), initializer='glorot_normal', trainable=True)
		super().build(input_shape)
	
	def call(self, x, mask=None):
		# et: (batch, timesteps, attention_size) hidden scores.
		et = K.tanh(K.dot(x, self.w) + self.b)
		# at: (batch, timesteps) softmax attention weights over the time axis.
		at = K.softmax(K.squeeze(K.dot(et, self.u), axis=-1))
		if mask is not None:
			at *= K.cast(mask, K.floatx())
			# BUGFIX: after zeroing masked positions the weights no longer
			# sum to 1, which scales the pooled output down. Renormalize —
			# equivalent to restricting the softmax to unmasked timesteps.
			at /= K.sum(at, axis=1, keepdims=True) + K.epsilon()
		# Weighted sum over the time axis -> (batch, features).
		atx = K.expand_dims(at, axis=-1)
		ot = atx * x
		return K.sum(ot, axis=1)
	
	def compute_output_shape(self, input_shape):
		# (batch, timesteps, features) -> (batch, features).
		return (input_shape[0], input_shape[-1])
	
	def compute_mask(self, input, input_mask=None):
		# The time axis is consumed here, so no mask propagates downstream.
		return None
	
	def get_config(self):
		# BUGFIX: without this override, model save/load silently dropped
		# attention_size and deserialization fell back to the default 128.
		config = super().get_config()
		config["attention_size"] = self.attention_size
		return config
	
	def _serialize_to_tensors(self):
		# NOTE(review): no-op override delegating to the base class —
		# presumably a workaround for a TF SavedModel/checkpoint issue;
		# confirm before removing.
		super()._serialize_to_tensors()
	
	def _restore_from_tensors(self, restored_tensors):
		# NOTE(review): see _serialize_to_tensors above.
		super()._restore_from_tensors(restored_tensors)
