import torch
from torch import nn
import torchvision

from dataset import AALDataset


def euclidean_distance(x, y):
    """Row-wise Euclidean (L2) distance between ``x`` and ``y``.

    Reduces over the last dimension but keeps it as size 1, so the
    result of two ``(batch, d)`` inputs is ``(batch, 1)``.
    """
    diff = x - y
    return diff.pow(2).sum(dim=-1, keepdim=True).sqrt()


class Dis(nn.Module):
    """Distance-based discriminator for pairs of channels-last inputs.

    Both inputs are flattened, embedded by a shared 3-layer MLP (``dis``),
    and the Euclidean distance between the two embeddings is returned.

    Args:
        spec_shape: shape of one input batch; the per-sample feature count is
            ``spec_shape[1] * spec_shape[2] * spec_shape[3]``.
            # assumes (batch, H, W, C) channels-last layout — TODO confirm vs dataset
    """

    def __init__(self, spec_shape):
        super(Dis, self).__init__()
        # BUGFIX: removed unused submodules from the original — a
        # torchvision.models.vgg16 (~138M parameters), a small conv stack
        # (`conv1`), and a final `fc1` head. None were ever called in
        # forward(); they only wasted memory and registered dead parameters
        # into any optimizer built from self.parameters().
        self.dis = nn.Sequential(
            nn.Linear(spec_shape[1] * spec_shape[2] * spec_shape[3], 1024),
            nn.Linear(1024, 256),
            nn.Linear(256, 256),
        )

    def forward(self, *x):
        """Embed both inputs with the shared MLP and return their distance.

        Args:
            *x: exactly two tensors of identical shape.

        Returns:
            Tensor of shape ``(batch, 1)`` — Euclidean distance between the
            two 256-d embeddings.
        """
        x1, x2 = x
        # Swap dims 1 and 3 (channels-last -> channels-first); the values are
        # flattened immediately after, so this only changes element ordering.
        x1 = torch.transpose(x1, 1, 3)
        x2 = torch.transpose(x2, 1, 3)

        x1 = self.dis(x1.reshape(x1.size()[0], -1))
        x2 = self.dis(x2.reshape(x2.size()[0], -1))
        return euclidean_distance(x1, x2)
    

class SiameseNetwork(nn.Module):
    """Siamese CNN: shared conv+FC branch, output is the pair's L2 distance.

    Args:
        spec_shape: shape of one input batch used to size the FC head.
            # assumes (batch, H, W, C) channels-last layout — TODO confirm vs dataset
    """

    def __init__(self, spec_shape):
        super().__init__()
        # Each stage is ReflectionPad2d(1) + 3x3 conv, which preserves the
        # spatial dimensions; only the channel count changes (3 -> 4 -> 8 -> 8).
        self.cnn1 = nn.Sequential(
            nn.ReflectionPad2d(1),
            nn.Conv2d(3, 4, kernel_size=3),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(4),

            nn.ReflectionPad2d(1),
            nn.Conv2d(4, 8, kernel_size=3),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(8),

            nn.ReflectionPad2d(1),
            nn.Conv2d(8, 8, kernel_size=3),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(8),
        )

        # BUGFIX: the flattened feature size was hard-coded to 1728720
        # (= 8 * 216090), which only matched one specific input resolution
        # even though spec_shape was already passed in. Since the conv stack
        # preserves H and W and ends with 8 channels, the flattened size is
        # 8 * H * W — compute it from spec_shape instead.
        flat_features = 8 * spec_shape[1] * spec_shape[2]
        self.fc1 = nn.Sequential(
            nn.Linear(flat_features, 500),
            nn.ReLU(inplace=True),

            nn.Linear(500, 500),
            nn.ReLU(inplace=True),

            nn.Linear(500, 5))

    def forward_once(self, x):
        """Run one input through the shared branch; returns (batch, 5)."""
        # Swap dims 1 and 3 so channels lead (NHWC -> NCHW layout swap).
        x = torch.transpose(x, 1, 3)
        output = self.cnn1(x)
        output = output.view(output.size()[0], -1)
        output = self.fc1(output)
        return output

    def forward(self, input1, input2):
        """Embed both inputs with the shared branch and return their
        Euclidean distance, shape (batch, 1)."""
        output1 = self.forward_once(input1)
        output2 = self.forward_once(input2)
        x = euclidean_distance(output1, output2)
        return x


def constractive_loss(y_true, y_pred, margin=1.0):
    """Contrastive loss (Hadsell et al., 2006) over predicted distances.

    Similar pairs (``y_true == 1``) are penalized by their squared distance;
    dissimilar pairs (``y_true == 0``) are penalized only while their distance
    is below ``margin``.

    Args:
        y_true: binary similarity labels (1 = similar pair, 0 = dissimilar).
        y_pred: predicted distances between pair embeddings.
        margin: separation enforced for dissimilar pairs. Defaults to 1.0,
            the value that was hard-coded in the original implementation.

    Returns:
        Scalar mean loss tensor.
    """
    # clamp(min=0) is the hinge max(margin - d, 0) without allocating a
    # zeros_like tensor as torch.maximum did.
    hinge = torch.clamp(margin - y_pred, min=0.0)
    return torch.mean(y_true * torch.square(y_pred) + (1.0 - y_true) * torch.square(hinge))


if __name__ == '__main__':
    # Hard-coded local data root; all dataset dirs point at the same folder.
    cwd = 'C:/dev_spa/DMuse/202202b6'

    config = {
        'dataset_config': {
            'train_sample_dir': cwd,
            'test_sample_dir': cwd,
            'train_label_dir': cwd,
            'test_label_dir': cwd,
            'compress_rate': 100.,
        },
        'batch_size': 8,
        'shuffle': True,
    }

    dataset = AALDataset(mode='train', **config['dataset_config'])

    dataloader = torch.utils.data.DataLoader(
        dataset=dataset, batch_size=config['batch_size'], shuffle=config['shuffle']
    )
    # Pull a single batch just to discover the input shape for the model.
    # (was iter(dataloader).__next__() — next() is the idiomatic spelling)
    x1, x2, y = next(iter(dataloader))

    dis = SiameseNetwork(x1.size())

    optimizer = torch.optim.Adam(dis.parameters(), lr=0.0003)

    # Single-epoch training loop over distance outputs vs. similarity labels.
    for i, (x1, x2, y) in enumerate(dataloader):
        output = dis(x1, x2)
        loss = constractive_loss(y, output)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # loss.item() replaces the deprecated loss.data.item() access.
        print('loss: {}'.format(loss.item()))
        # Checkpoint (overwrite) every 10 batches.
        if (i + 1) % 10 == 0:
            torch.save(dis.state_dict(), './dis.pth')
