# Neural networks for structure prediction (fully-connected layers; a
# transposed-convolution variant was planned — only FC/conv code is present).
import tensorflow as tf
import numpy as np
import json
from tensorflow.keras import layers, Sequential, losses, optimizers, regularizers
from tensorflow import keras
import os
import matplotlib.pyplot as plt
import glob
from tqdm import tqdm
from dataset_self import *


# Fully-connected spectrum prediction network
class MySpecNet(tf.keras.Model):
    """Fully-connected network mapping input parameters to a spectrum slice.

    The forward pass feeds the input through a stack of Dense/Dropout
    layers ending in a linear 256-unit head, then returns channels
    29..229 (a 201-point window).

    NOTE(review): `in_num` and `out_num` are accepted for interface
    compatibility but are not referenced by the layer definitions.
    """

    def __init__(self, in_num=6, out_num=200):
        super(MySpecNet, self).__init__()
        # None in the spec marks a Dropout(0.4); integers are Dense widths.
        widths = [100, 2000, 2000, None, 2000, 2000, None,
                  2000, 2000, None, 1000]
        stack = []
        for w in widths:
            if w is None:
                stack.append(layers.Dropout(0.4))
            else:
                stack.append(layers.Dense(w, activation=tf.nn.relu))
        stack.append(layers.Dense(256))  # linear output head (no activation)
        self.fc = stack

    def call(self, inputs):
        x = inputs
        for layer in self.fc:
            x = layer(x)
        # Keep channels 29..229 -> 201-point output window.
        # NOTE(review): tf.squeeze also drops the batch axis when batch == 1;
        # callers appear to rely on the squeezed shape — confirm before changing.
        return tf.squeeze(x[:, 29:230])


# # Alternative network (fully-connected + convolutional) — disabled, kept for reference
# class MySpecNet(tf.keras.Model):
#     def __init__(self, in_num=6, out_num=200):
#         super(MySpecNet, self).__init__()
#         self.fc1 = layers.Dense(512, activation=tf.nn.relu)
#         self.res_block = Sequential([layers.Dense(512, activation=tf.nn.relu),
#                                      layers.Dense(512, activation=tf.nn.relu),
#                                      layers.Dense(512, activation=tf.nn.relu)])
#
#         self.conv1 = Sequential([layers.Conv2D(3, (4, 1), 1, 'same', use_bias=False),
#                                  layers.BatchNormalization()])
#
#         self.conv2 = Sequential([layers.Conv2D(32, (4, 1), 1, 'same', use_bias=False),
#                                  layers.BatchNormalization(),
#                                  layers.MaxPooling2D((2, 1), 1, 'valid'),
#                                  layers.ReLU(),
#                                  layers.Conv2D(64, (4, 1), 1, 'same', use_bias=False),
#                                  layers.BatchNormalization(),
#                                  layers.MaxPooling2D((2, 1), 1, 'valid'),
#                                  layers.ReLU(),
#                                  layers.Conv2D(256, (4, 1), 1, 'same', use_bias=False),
#                                  layers.BatchNormalization(),
#                                  layers.MaxPooling2D((2, 1), 1, 'valid'),
#                                  layers.ReLU(),
#                                  ])
#         self.ave = layers.GlobalAveragePooling2D()
#         # self.gru = layers.GRU(256)
#
#     def call(self, inputs):
#         b = inputs.shape[0]
#         x = inputs
#         x = self.fc1(x)
#         x_res1 = self.res_block(x)
#         x = tf.add(x, x_res1)
#         x_res2 = self.res_block(x)
#         x = tf.add(x, x_res2)
#         x = tf.reshape(x, (b, -1, 1, 1))
#         x = tf.nn.relu(self.conv1(x))
#         out = self.ave(self.conv2(x))
#         return out[:, 29:229]


# VAE network
class SpecVAE(tf.keras.Model):
    """Conditional VAE reconstructing 7 structure parameters.

    The encoder compresses the structure parameters into a 2*z_dim vector
    (split into mu and a log-variance-style term); the decoder maps the
    sampled latent, concatenated with spectral-line features supplied by
    the caller, back to 7 structure parameters.
    """

    def __init__(self, z_dim=4):
        super(SpecVAE, self).__init__()
        self.z_dim = z_dim
        # Encoder trunk; fc_en1 emits 2*z_dim values split in encoder().
        self.fc_en = Sequential([layers.Dense(600, activation=tf.nn.relu),
                                 layers.Dense(600, activation=tf.nn.relu)])
        self.fc_en1 = Sequential([layers.Dense(300, activation=tf.nn.relu),
                                  layers.Dense(self.z_dim * 2)])

        # Decoder: [z, conditioning features] -> 7 structure parameters.
        self.fc_de = Sequential([layers.Dense(300, activation=tf.nn.relu),
                                 layers.Dense(600, activation=tf.nn.relu),
                                 layers.Dense(600, activation=tf.nn.relu),
                                 layers.Dense(7)])

    def encoder(self, inputs):
        """Encode inputs; return (mu, sig).

        NOTE(review): the first z_dim outputs are taken as sig and the
        last z_dim as mu. The split order is arbitrary for a learned
        encoder but must stay consistent with re_para's interpretation.
        """
        x = inputs
        x = self.fc_en(x)
        x = self.fc_en1(x)
        sig = x[:, 0:self.z_dim]
        mu = x[:, self.z_dim:]
        return mu, sig

    def decoder(self, z):
        """Decode a latent (+conditioning) vector into 7 parameters."""
        x = z
        x = self.fc_de(x)
        return x

    def re_para(self, mu, sig):
        """Reparameterization trick: z = mu + std * eps.

        sig is treated as log-variance: std = exp(sig)**0.5 == exp(0.5*sig).
        """
        # FIX: use the dynamic tf.shape instead of the static sig.shape so
        # sampling works when the batch dimension is unknown (None) at
        # graph-construction time (e.g. inside tf.function / Keras fit).
        eps = tf.random.normal(tf.shape(sig))
        std = tf.exp(sig) ** 0.5
        out = mu + std * eps
        return out

    def call(self, inputs, inputs_2):
        # inputs:   structure parameters fed to the encoder
        # inputs_2: spectral-line features (center, linewidth, peak value)
        #           concatenated with the sampled latent before decoding
        x = inputs
        y = tf.cast(inputs_2, dtype=tf.float32)
        mu1, sig1 = self.encoder(x)      # encode to (mu, sig)
        z = self.re_para(mu1, sig1)      # reparameterized sample
        z_in = tf.concat([z, y], axis=1)
        out = self.decoder(z_in)
        return out, mu1, sig1


