# -*- coding: utf-8 -*-
"""
Created on Thu Apr 21 23:20:41 2022

@author: Jovi Wong
"""

from AutoEncoder import AutoEncoder
from TensorDataset import TensorDataset
from torch.utils.data import DataLoader
import torch.nn.functional as F
import os
import torch

class AETrainer():
    """Trains an AutoEncoder on cached node representations, then saves the
    encoded (hidden) representations and the model's parameters.
    """

    def __init__(self, ae: AutoEncoder, input_rep_path: str, penalty: torch.Tensor, rep_save_dir: str = "rep_cache"):
        """
        Args:
            ae: the auto-encoder model to train (must expose `model_name`,
                `hidden_dim`, `encode()` and `save_params()`).
            input_rep_path: path of a tensor saved with `torch.save`, holding
                the input representations, one row per node.
            penalty: per-node weight tensor; applied squared to both input and
                reconstruction before the loss when the shapes match.
            rep_save_dir: directory where the encoded representations are cached.
        """
        print("begin to init AE trainer")
        self.model = ae
        self.penalty = penalty
        self.input_rep_path = input_rep_path
        # Cache file name encodes model name and hidden size, e.g. "gcn-64-rep.pt".
        rep_out_name = ae.model_name + "-" + str(ae.hidden_dim) + "-rep.pt"
        self.save_rep_path = os.path.join(rep_save_dir, rep_out_name)
        # Detect the device once and remember it; train() reuses it so the
        # output buffer lands on the same device (the previous hard-coded
        # .cuda() there crashed on CPU-only hosts).
        self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
        self.model.to(self.device)

    def train(self, epochs: int):
        """Run `epochs` passes of (optionally penalty-weighted) L1
        reconstruction training, then encode every node with the trained
        model and persist the representations and model parameters.
        """
        # map_location avoids first deserializing onto a device that may not
        # exist on this host (e.g. a tensor saved on GPU, loaded on CPU).
        rep = torch.load(self.input_rep_path, map_location=self.device)
        dataset = TensorDataset(rep)
        dataloader = DataLoader(dataset=dataset, batch_size=8, shuffle=True)
        optimizer = torch.optim.Adam(self.model.parameters())
        for epoch in range(epochs):
            print("<< epochs: {0}/{1} >>".format(epoch + 1, epochs))
            node_cnt = 0
            for idx, in_rep in dataloader:
                optimizer.zero_grad()
                out_rep = self.model(in_rep)
                # Weight the reconstruction error per node by penalty^2 when
                # the selected penalty rows match the batch shape exactly;
                # otherwise fall back to the unweighted loss (original behavior).
                if self.penalty[idx].shape == out_rep.shape:
                    weight = self.penalty[idx] * self.penalty[idx]
                    out_rep = weight * out_rep
                    in_rep = weight * in_rep
                loss_score = F.l1_loss(in_rep, out_rep)
                loss_score.backward()
                optimizer.step()
                # Count the actual batch size so the tally stays correct on
                # the (possibly smaller) final batch, not a hard-coded 8.
                node_cnt += in_rep.size(0)
                if node_cnt % 800 == 0:
                    print("calculate nodes {0}/{1} ".format(node_cnt, len(dataset)))

        # Encode all nodes with the trained model. Allocate on self.device
        # (works on CPU too) and disable autograd: we never backprop here,
        # so building a graph would only waste memory.
        nodes_rep = torch.zeros(len(dataset), self.model.hidden_dim, device=self.device)
        with torch.no_grad():
            for idx, in_rep in dataloader:
                nodes_rep[idx] = self.model.encode(in_rep)

        # makedirs(exist_ok=True) creates intermediate directories and is
        # race-free, unlike the previous exists()+mkdir pair.
        os.makedirs(os.path.split(self.save_rep_path)[0], exist_ok=True)
        torch.save(nodes_rep, self.save_rep_path)

        print("Saving auto-encoder {0}'s parameters to file {1}".format(self.model.model_name, self.save_rep_path))
        self.model.save_params()
        # Free GPU memory held by the finished model; the trainer is one-shot.
        torch.cuda.empty_cache()
        del self.model