
from keras.backend import shape
from keras.datasets import fashion_mnist
from keras.layers import Input,Dense,Reshape,Flatten,Dropout
from keras.layers import BatchNormalization,Activation,ZeroPadding2D
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.convolutional import UpSampling2D,Conv2D
from keras.optimizers import Adam
from keras.models import Sequential,Model
import matplotlib.pyplot as plt
import sys
import numpy as np

class GAN():
    """Vanilla GAN (MLP generator + MLP discriminator) for Fashion-MNIST.

    The generator maps a 100-dim Gaussian noise vector to a 28x28x1 image
    with tanh output; the discriminator scores images with a sigmoid.
    Training history is accumulated in ``d_loss_list`` / ``d_acc_list`` /
    ``g_loss_list`` for later plotting via :meth:`plot_loss`.
    """

    def __init__(self) -> None:
        # 28x28 single-channel Fashion-MNIST images.
        self.img_rows = 28
        self.img_cols = 28
        self.channels = 1
        self.img_shape = (self.img_rows, self.img_cols, self.channels)
        # Dimensionality of the generator's noise input.
        self.latent_dim = 100

        # lr=0.0002, beta_1=0.5 — the usual GAN/DCGAN optimizer settings.
        optimizer = Adam(0.0002, 0.5)

        # The discriminator is trained standalone, so compile it on its own.
        self.discriminator = self.build_discriminator()
        self.discriminator.compile(loss='binary_crossentropy',
                                   optimizer=optimizer,
                                   metrics=['accuracy'])

        self.generator = self.build_generator()

        # BUGFIX: freeze the discriminator inside the combined model so that
        # combined.train_on_batch only updates the GENERATOR. The standalone
        # discriminator still trains normally because its compile() above
        # already captured trainable=True.
        self.discriminator.trainable = False

        # Combined model: noise z -> generator -> discriminator -> validity.
        z = Input(shape=(self.latent_dim,))
        img = self.generator(z)
        validity = self.discriminator(img)
        self.combined = Model(z, validity)
        self.combined.compile(loss='binary_crossentropy', optimizer=optimizer)

        # Per-step training history, consumed by plot_loss().
        self.d_loss_list = []
        self.d_acc_list = []
        self.g_loss_list = []

    def build_generator(self):
        """Build the generator: latent vector -> 28x28x1 image in [-1, 1].

        Returns a ``Model`` mapping ``(latent_dim,)`` noise to ``img_shape``.
        """
        model = Sequential()
        # Widening MLP trunk; BatchNorm stabilizes GAN training.
        model.add(Dense(256, input_dim=self.latent_dim))
        model.add(LeakyReLU(alpha=0.2))
        model.add(BatchNormalization(momentum=0.8))
        model.add(Dense(512))
        model.add(LeakyReLU(alpha=0.2))
        model.add(BatchNormalization(momentum=0.8))
        model.add(Dense(1024))
        model.add(LeakyReLU(alpha=0.2))
        model.add(BatchNormalization(momentum=0.8))
        # tanh output matches the [-1, 1] rescaling applied in train().
        model.add(Dense(np.prod(self.img_shape), activation='tanh'))
        model.add(Reshape(self.img_shape))
        model.summary()

        noise = Input(shape=(self.latent_dim,))
        img = model(noise)
        return Model(noise, img)

    def build_discriminator(self):
        """Build the discriminator: image -> probability that it is real.

        Returns a ``Model`` mapping ``img_shape`` to a scalar in (0, 1).
        """
        model = Sequential()
        model.add(Flatten(input_shape=self.img_shape))
        model.add(Dense(512))
        model.add(LeakyReLU(alpha=0.2))
        model.add(Dense(256))
        model.add(LeakyReLU(alpha=0.2))
        model.add(Dense(1, activation='sigmoid'))
        model.summary()

        img = Input(shape=self.img_shape)
        validity = model(img)
        return Model(img, validity)

    # Backward-compatible alias for the original (misspelled) method name.
    build_discrimnator = build_discriminator

    def train(self, epochs, batch_size=128, sample_interval=50):
        """Run adversarial training for ``epochs`` steps.

        Each step trains the discriminator on one real and one generated
        batch, then trains the generator through the combined model.
        A sample generated image is displayed every ``sample_interval``
        steps.
        """
        (X_train, _), (_, _) = fashion_mnist.load_data()
        # Rescale uint8 pixels from [0, 255] to [-1, 1] (matches tanh output).
        X_train = X_train / 127.5 - 1.0
        X_train = np.expand_dims(X_train, axis=3)

        # Ground-truth labels: 1 = real, 0 = fake.
        valid = np.ones((batch_size, 1))
        fake = np.zeros((batch_size, 1))

        for epoch in range(epochs):
            # ---- Train discriminator: half real batch, half fake batch ----
            idx = np.random.randint(0, X_train.shape[0], batch_size)
            imgs = X_train[idx]
            noise = np.random.normal(0, 1, (batch_size, self.latent_dim))
            gen_imgs = self.generator.predict(noise)
            d_loss_real = self.discriminator.train_on_batch(imgs, valid)
            d_loss_fake = self.discriminator.train_on_batch(gen_imgs, fake)
            d_loss = 0.5 * np.add(d_loss_fake, d_loss_real)
            self.d_loss_list.append(d_loss[0])
            self.d_acc_list.append(d_loss[1])

            # ---- Train generator: wants the frozen D to output "valid" ----
            noise = np.random.normal(0, 1, (batch_size, self.latent_dim))
            g_loss = self.combined.train_on_batch(noise, valid)
            self.g_loss_list.append(g_loss)

            print("%d [D loss: %f, accuracy:%.2f%%][G loss:%f]" % (epoch, d_loss[0], d_loss[1] * 100, g_loss))

            # BUGFIX: honor the sample_interval parameter (was hard-coded
            # to 1000, so the default of 50 was silently ignored).
            if epoch % sample_interval == 0:
                plt.imshow(gen_imgs[0].reshape(28, 28), cmap=plt.get_cmap('gray'))
                plt.show()

    def plot_loss(self):
        """Plot discriminator loss/accuracy and generator loss history."""
        plt.subplot(221)
        plt.title("discrime loss")
        plt.plot(self.d_loss_list)
        plt.subplot(222)
        plt.title("discrime acc")
        plt.plot(self.d_acc_list)
        plt.subplot(223)
        plt.title("gene loss")
        plt.plot(self.g_loss_list)
        # BUGFIX: actually render the figure (the original built the
        # subplots but never displayed them when run as a script).
        plt.tight_layout()
        plt.show()

# Run training only when executed as a script, not on import.
if __name__ == "__main__":
    gan = GAN()
    gan.train(epochs=1000)
    gan.plot_loss()