# -*- coding: utf-8 -*-
"""
Created on Thu Apr 21 23:15:50 2022

@author: Jovi Wong
"""

import os
import torch
import torch.nn as nn

class AutoEncoder(nn.Module):
    """Single-hidden-layer autoencoder with a shared element-wise activation.

    The activation (default ``torch.tanh``) is applied after both the encoder
    and the decoder linear layers. On construction, parameters are restored
    from ``params_save_dir`` when a checkpoint matching
    ``(name, in_dim, hidden_dim)`` exists; otherwise they are
    Kaiming-normal initialized with zero biases.
    """

    def __init__(self,
                 name,
                 in_dim: int,
                 hidden_dim: int,
                 activate_func=torch.tanh,
                 params_save_dir: str = "params_cache"):
        """
        Args:
            name: model identifier; used to build the checkpoint file name.
            in_dim: dimensionality of the input (and of the reconstruction).
            hidden_dim: dimensionality of the latent code.
            activate_func: element-wise activation applied after both the
                encoder and the decoder.
            params_save_dir: directory where parameter checkpoints live.
        """
        super().__init__()
        self.model_name = name
        self.file_name = f"{name}-{in_dim}-{hidden_dim}.pth"
        # makedirs(exist_ok=True) avoids the check-then-create race of
        # exists() + mkdir() and also creates missing parent directories.
        os.makedirs(params_save_dir, exist_ok=True)
        self.params_path = os.path.join(params_save_dir, self.file_name)
        # BUG FIX: removed a stray argument-less `.to()` call that was a no-op.
        self.encoder = nn.Linear(in_features=in_dim, out_features=hidden_dim, bias=True)
        self.decoder = nn.Linear(in_features=hidden_dim, out_features=in_dim, bias=True)
        self.in_dim = in_dim
        self.hidden_dim = hidden_dim
        self.activate_func = activate_func
        # BUG FIX: a trailing comma previously made this a 1-tuple, not a str.
        self.params_save_dir = params_save_dir
        self.params_init()

    def forward(self, x):
        """Encode then decode ``x``; returns the reconstruction."""
        return self.decode(self.encode(x))

    def encode(self, x):
        """Map ``x`` to its latent representation (activation applied)."""
        return self.activate_func(self.encoder(x))

    def decode(self, x):
        """Reconstruct the input from latent code ``x`` (activation applied)."""
        return self.activate_func(self.decoder(x))

    def save_params(self):
        """Persist the current parameters to ``self.params_path``."""
        torch.save(self.state_dict(), self.params_path)

    def params_init(self):
        """Load parameters from disk if a checkpoint exists, else Kaiming-init."""
        if os.path.exists(self.params_path):
            # map_location="cpu" makes checkpoints saved on a GPU loadable on
            # CPU-only machines; the module is constructed on CPU regardless.
            self.load_state_dict(torch.load(self.params_path, map_location="cpu"))
        else:
            nn.init.kaiming_normal_(self.encoder.weight.data)
            nn.init.constant_(self.encoder.bias.data, 0.0)
            nn.init.kaiming_normal_(self.decoder.weight.data)
            nn.init.constant_(self.decoder.bias.data, 0.0)