# -*- encoding: utf-8 -*-
'''
@File    :   decoders.py
@Time    :   2021/11/22 9:05
@Author  :   ZhangChaoYang
@Desc    :   解码器
'''

import tensorflow as tf
from tensorflow.keras import Sequential, Model
from tensorflow.keras.layers import Dense, BatchNormalization, Reshape, Conv2DTranspose, ReLU


class FCDecoder(Model):
    '''
    Decoder built from a stack of fully-connected layers.

    Mirrors an encoder whose hidden layer widths are ``hidden_dims``:
    the layers are applied in reverse order, and the final Dense layer
    projects back to ``seq_len * feature_count`` before reshaping to
    ``(seq_len, feature_count)``.
    '''

    def __init__(self, input_shape, hidden_dims, activation=tf.nn.relu, batch_norm=True):
        '''
        :param input_shape: (seq_len, feature_count) of the reconstructed output
        :param hidden_dims: encoder hidden widths; the last entry is assumed to be
                            the latent dim (the decoder's input) and is skipped
        :param activation: activation for the hidden Dense layers
        :param batch_norm: whether to insert BatchNormalization before each Dense
        '''
        super(FCDecoder, self).__init__()
        seq_len, feature_count = input_shape
        layers = []
        # Walk the hidden dims in reverse, skipping the last one (the latent
        # size, which is what the decoder receives as input).
        for hidden_dim in hidden_dims[-2::-1]:
            if batch_norm:
                layers.append(BatchNormalization())
            layers.append(Dense(units=hidden_dim, activation=activation))
        if batch_norm:
            layers.append(BatchNormalization())
        # No activation on the decoder's last layer: leave the output range
        # unconstrained.
        layers.append(Dense(units=seq_len * feature_count))
        layers.append(Reshape(target_shape=(seq_len, feature_count)))
        self.main = Sequential(layers)

    def call(self, inputs, training=None, mask=None):
        # Forward `training` so BatchNormalization uses batch statistics while
        # training and moving averages at inference; previously the flag was
        # silently dropped.
        return self.main(inputs, training=training)


class ConvDecoder(Model):
    '''
    Decoder built from transposed convolutions, mirroring a strided-conv
    encoder: it reshapes a flat latent vector back to the encoder's final
    feature-map size, then upsamples stage by stage.
    '''

    def __init__(self, height, width, kernel_size, strides, filters, active_layer=ReLU):
        '''
        :param height: original image height (before the encoder)
        :param width: original image width (before the encoder)
        :param kernel_size: kernel size shared by all Conv2DTranspose layers
        :param strides: (stride_h, stride_w) shared by all layers
        :param filters: encoder filter counts; applied here in reverse
        :param active_layer: activation layer class used after each BatchNorm
        '''
        super(ConvDecoder, self).__init__()
        # Work out the spatial size of the feature map after the encoder's
        # last conv: each of the len(filters) stages divides H/W by the stride.
        final_height = height
        final_width = width
        for _ in filters:
            final_height //= strides[0]
            final_width //= strides[1]
        layers = [Reshape(target_shape=(final_height, final_width, filters[-1]))]
        # Mirror the encoder: upsample through the filter list in reverse,
        # skipping the innermost entry (already the reshaped channel count).
        # `n_filters` avoids shadowing the builtin `filter`.
        for n_filters in filters[-2::-1]:
            layers.append(Conv2DTranspose(filters=n_filters, kernel_size=kernel_size,
                                          strides=strides, padding='same',
                                          use_bias=False))
            layers.append(BatchNormalization(momentum=0.9, epsilon=1e-5))
            layers.append(active_layer())
        layers.append(Conv2DTranspose(filters=1, kernel_size=kernel_size,
                                      strides=strides, padding='same', use_bias=False,
                                      activation=tf.nn.sigmoid))  # sigmoid on the last layer: energy spectra are all positive
        self.main = Sequential(layers)

    def call(self, inputs, training=None, mask=None):
        # Accept and forward `training` (consistent with FCDecoder and the
        # Keras Model.call signature) so BatchNormalization switches correctly
        # between training and inference behavior. Defaults keep old callers
        # working unchanged.
        return self.main(inputs, training=training)
