import tensorflow as tf
from tensorflow.python.keras.layers import Conv2D,MaxPool2D

from ZhengqiLoader import ZhengqiLoader


def hw_flatten(x):
    """Collapse the spatial dimensions of a feature map.

    Reshapes a [batch, height, width, channels] tensor into
    [batch, height*width, channels] so attention can be computed over
    the flattened spatial positions. Uses the dynamic ``tf.shape`` so
    unknown (None) batch sizes are handled.
    """
    dyn_shape = tf.shape(x)
    batch, channels = dyn_shape[0], dyn_shape[-1]
    return tf.reshape(x, [batch, -1, channels])

class SelfAttention(tf.keras.Model):
  """SAGAN-style self-attention over spatial positions, plus an MLP head.

  The attention branch projects the input with 1x1 convolutions f/g/h,
  builds an attention map over the H*W spatial positions, and mixes the
  attended values back into the input through a learned scalar ``gamma``
  (initialised to 0, so the block starts as an identity residual). The
  result is flattened and passed through dense layers to a 6-unit output.

  NOTE(review): the residual ``gamma * o + x`` in call() reshapes the
  attention output to ``tf.shape(x)``, which only succeeds when
  ``number_of_filters`` equals the input's channel count — confirm
  callers respect this constraint.
  """

  def __init__(self, number_of_filters, dtype=tf.float64):
    """Create the layers.

    Args:
      number_of_filters: output channels of the ``h`` (value) projection.
        Must match the input's channel count (see class note).
      dtype: dtype for the attention convolutions and ``gamma``.
    """
    super(SelfAttention, self).__init__()
    # NOTE(review): cov_1 and maxpool are constructed but never used in
    # call(); kept so the tracked layer/variable set stays unchanged —
    # consider wiring them in or removing them.
    # Bug fix: this layer was named "f_x", colliding with self.f below.
    self.cov_1 = Conv2D(16, 3,
                        strides=1, padding='SAME', name="cov_1",
                        activation=tf.nn.relu, dtype=dtype)
    self.maxpool = MaxPool2D(strides=[2, 2])
    # f/g are the reduced-channel (C//8) query/key projections; h is the
    # full-channel value projection (standard SAGAN channel reduction).
    self.f = Conv2D(number_of_filters // 8, 1,
                    strides=1, padding='SAME', name="f_x",
                    activation=None, dtype=dtype)

    self.g = Conv2D(number_of_filters // 8, 1,
                    strides=1, padding='SAME', name="g_x",
                    activation=None, dtype=dtype)

    self.h = Conv2D(number_of_filters, 1,
                    strides=1, padding='SAME', name="h_x",
                    activation=None, dtype=dtype)

    # gamma == 0 at init: the attention branch contributes nothing until
    # training moves it, so the block begins as an identity mapping.
    self.gamma = tf.Variable(0., dtype=dtype, trainable=True, name="gamma")
    # MLP head. NOTE(review): these Dense layers use Keras' default
    # dtype (typically float32) while the attention path may be float64;
    # Keras will cast inputs down to the layer dtype — confirm the
    # silent downcast is intended.
    self.flatten = tf.keras.layers.Flatten()
    self.dense_1 = tf.keras.layers.Dense(40, activation=tf.nn.leaky_relu)
    self.dense_2 = tf.keras.layers.Dense(20, activation=tf.nn.leaky_relu)
    self.drop_out = tf.keras.layers.Dropout(0.3)
    self.dense_3 = tf.keras.layers.Dense(10, activation=tf.nn.leaky_relu)
    self.out = tf.keras.layers.Dense(6)

  def call(self, x):
    """Apply self-attention to ``x``, then the dense head.

    Args:
      x: [batch, height, width, channels] feature map; ``channels`` must
        equal ``number_of_filters`` for the residual reshape to succeed.

    Returns:
      A [batch, 6] tensor of logits.
    """
    f = self.f(x)  # query projection
    g = self.g(x)  # key projection
    h = self.h(x)  # value projection

    f_flat = hw_flatten(f)  # [B, N, C//8], N = H*W
    g_flat = hw_flatten(g)  # [B, N, C//8]
    h_flat = hw_flatten(h)  # [B, N, C]

    # Attention logits: [B, N, C//8] @ [B, C//8, N] -> [B, N, N]
    s = tf.matmul(g_flat, f_flat, transpose_b=True)
    # Softmax over the last axis: each position attends over all positions.
    beta = tf.nn.softmax(s, axis=-1)
    o = tf.matmul(beta, h_flat)  # attended values, [B, N, C]
    # Learned residual mix; gamma == 0 at init makes this an identity.
    y = self.gamma * tf.reshape(o, tf.shape(x)) + x

    z = self.flatten(y)
    z = self.dense_1(z)
    z = self.dense_2(z)
    z = self.drop_out(z)
    z = self.dense_3(z)
    return self.out(z)
if __name__ == '__main__':
    # Load the Zhengqi training data and report the raw feature shape.
    DATA_PATH = './zhengqi_train.txt'
    BATCH_SIZE = 10

    loader = ZhengqiLoader(DATA_PATH)
    (x_train_data, train_dataset, y_train,
     x_test_data, y_test) = loader.preprocess(BATCH_SIZE)
    print(x_train_data.shape)

    # Build the attention model on a 5x5 single-channel input and print
    # its layer/parameter summary.
    generator = SelfAttention(number_of_filters=8)
    generator.build(input_shape=(None, 5, 5, 1))
    generator.summary()
