import os

import torch
import torch.optim
import torch.utils.data as Data
from tqdm import tqdm

from BPRMF.DataLoader import DataLoader
from BPRMF.Model import MF, PairwiseLoss

class Trainer:
    """Trains a BPR matrix-factorization model and optionally saves its weights.

    ``hyper_params`` must provide at least 'device', 'batch_size' and 'epochs',
    plus whatever keys DataLoader / MF / PairwiseLoss consume
    (e.g. 'dataset_path', 'embed_dim').
    """

    def __init__(self, hyper_params):
        # Build the training dataset and move the MF model to the target device.
        self.dataset = DataLoader(hyper_params).generate_dataset()
        self.mf_model = MF(hyper_params).to(hyper_params['device'])
        self.hyper_params = hyper_params

    def fit(self, save_model=True):
        """Run BPR pairwise training for the configured number of epochs.

        Args:
            save_model: when True, persist the trained state dict to
                'BPRMF/model_dat/model.pkl' (directory created if missing).
        """
        print('BPRMF: Training Model...')
        train_loader = Data.DataLoader(
            dataset=self.dataset,
            batch_size=self.hyper_params['batch_size'],
            shuffle=True
        )
        # AdamW: decoupled weight decay regularizes the embedding tables.
        optimizer = torch.optim.AdamW(
            self.mf_model.parameters(), weight_decay=1e-2)
        loss_func = PairwiseLoss(self.hyper_params, self.mf_model)

        # Ensure training mode (matters if the model ever adds dropout/BN).
        self.mf_model.train()
        # Hoist the device lookup out of the per-batch loop.
        device = self.hyper_params['device']

        for epoch in range(self.hyper_params['epochs']):
            total_loss = 0.0
            total_step = 0

            # Each batch: one positive item, one sampled negative, the user id.
            for pos_item, neg_item, user_id in tqdm(train_loader):
                pos_item = pos_item.to(device)
                neg_item = neg_item.to(device)
                user_id = user_id.to(device)

                loss = loss_func.get_loss(pos_item, neg_item, user_id)
                total_loss += loss.item()
                total_step += 1

                optimizer.zero_grad()
                loss.backward()
                optimizer.step()

            # Guard against an empty loader to avoid ZeroDivisionError.
            avg_loss = total_loss / max(total_step, 1)
            print(f'BPRMF: Epoch: {epoch}, Loss: {avg_loss}')

        if save_model:
            # torch.save does not create directories; make sure the target exists.
            os.makedirs('BPRMF/model_dat', exist_ok=True)
            torch.save(self.mf_model.state_dict(), 'BPRMF/model_dat/model.pkl')


if __name__ == '__main__':
    hyper_params = {
        'dataset_path': '../datasets/lib.txt',
        'batch_size': 128,
        'epochs': 10,
        'embed_dim': 32,
        'neg_sample_cnt': 1000,
        'epsilon': 0.1,
        # Required by Trainer (__init__ and fit read hyper_params['device']);
        # the original dict omitted it, causing a KeyError at startup.
        'device': 'cuda' if torch.cuda.is_available() else 'cpu',
    }

    trainer = Trainer(hyper_params)
    trainer.fit()
