import numpy as np
from scipy.stats import norm
import matplotlib.pyplot as plt

from core.config import no_grad
from core.cuda import cuda, cuda_module
from core.data.dataloader import DataLoader
from implement.datasource.minist import MNIST
from implement.models.vae import VAE
from implement.optimizers.adam import Adam
use_gpu = cuda.gpu_enable  # True when the project's CUDA backend detects a usable GPU
max_epoch = 10             # number of full passes over the training set
batch_size = 16            # mini-batch size fed to the VAE per optimizer step
latent_size = 2            # VAE latent dimensionality; 2 lets show_digits plot a 2D manifold



def show_digits(epoch=0):
    """Render a 15x15 grid of digits decoded from a 2D latent grid.

    Latent coordinates are sampled at equally spaced quantiles of the
    standard normal prior (via the inverse CDF), so the grid concentrates
    where the prior places probability mass. Uses the module-level ``vae``.

    Args:
        epoch: epoch index; only referenced by the commented-out savefig.
    """
    side = 15          # tiles per row/column
    digit_size = 28    # MNIST digit height/width in pixels
    canvas = np.zeros((digit_size * side, digit_size * side))
    # Equal-probability-mass spacing of the unit Gaussian prior.
    grid_x = norm.ppf(np.linspace(0.05, 0.95, side))
    grid_y = norm.ppf(np.linspace(0.05, 0.95, side))

    for row, yi in enumerate(grid_x):
        for col, xi in enumerate(grid_y):
            z = np.array([[xi, yi]])
            if use_gpu:
                z = cuda.to_cupy(z)
            # Decoding only — no gradients needed for visualization.
            with no_grad():
                decoded = vae.decoder(z)
            if use_gpu:
                decoded.data = cuda.to_numpy(decoded.data)
            tile = decoded.data.reshape(digit_size, digit_size)
            r0 = row * digit_size
            c0 = col * digit_size
            canvas[r0:r0 + digit_size, c0:c0 + digit_size] = tile

    plt.figure(figsize=(10, 10))
    plt.axis('off')
    plt.imshow(canvas, cmap='Greys_r')
    plt.show()
    #plt.savefig('vae_{}.png'.format(epoch))


# Model, optimizer, and data-pipeline setup.
vae = VAE(latent_size)
optimizer = Adam().setup(vae)


def transform(x):
    """Scale raw pixel values from [0, 255] bytes to float32 in [0, 1]."""
    return (x / 255.0).astype(np.float32)


train_set = MNIST(train=True, transform=transform)
train_loader = DataLoader(train_set, batch_size)

if use_gpu:
    vae.to_gpu()
    train_loader.to_gpu()
    # NOTE(review): `xp` appears unused in this file — confirm before removing.
    xp = cuda_module

# Log less frequently on GPU, where iterations complete much faster.
# (Hoisted out of the inner loop: the value is loop-invariant.)
log_interval = 100 if use_gpu else 10

for epoch in range(max_epoch):
    avg_loss = 0  # running sum of per-batch losses for this epoch
    cnt = 0       # batches processed so far this epoch

    for x, t in train_loader:
        cnt += 1

        # Forward pass, then standard zero-grad / backprop / update cycle.
        loss = vae(x)
        vae.cleargrads()
        loss.backward()
        optimizer.update()

        avg_loss += loss.data
        if cnt % log_interval == 0:
            # Fractional epoch gives finer-grained progress reporting.
            epoch_detail = epoch + cnt / train_loader.max_iter
            print('epoch: {:.2f}, loss: {:.4f}'.format(epoch_detail,
                                                       float(avg_loss / cnt)))

    # Visualize the learned latent manifold after each epoch.
    show_digits(epoch)