import torch
import torch.optim
import torch.utils.data as Data
from BPR.DataLoader import DataLoader
from BPR.Model import MF, PairwiseLoss
from tqdm import tqdm


class Trainer:
    """Trains a BPR matrix-factorization model on pairwise (pos, neg) samples."""

    def __init__(self, hyper_params):
        """Build the dataset and model from the given hyper-parameter dict.

        Args:
            hyper_params: dict with keys such as 'batch_size', 'epochs',
                'embed_dim', etc. (see the __main__ block for an example).
        """
        self.dataset = DataLoader(hyper_params).generate_dataset()
        self.mf_model = MF(hyper_params)
        self.hyper_params = hyper_params

    def fit(self, save_model=True):
        """Run the training loop for hyper_params['epochs'] epochs.

        Args:
            save_model: if True, persist the trained model's state_dict to
                'BPR/model_dat/model.pkl' after training.
        """
        print('Training Model...')
        train_loader = Data.DataLoader(
            dataset=self.dataset,
            batch_size=self.hyper_params['batch_size'],
            shuffle=True
        )
        # AdamW decouples weight decay from the gradient update (L2-style
        # regularization on the embeddings).
        optimizer = torch.optim.AdamW(
            self.mf_model.parameters(), weight_decay=1e-2)
        loss_func = PairwiseLoss(self.hyper_params, self.mf_model)

        # Ensure training-mode behavior (dropout/batch-norm, if the model
        # ever gains them).
        self.mf_model.train()

        for epoch in range(self.hyper_params['epochs']):
            total_loss = 0.0
            total_step = 0

            for pos_item, neg_item, item_weight, user_id, pos_extra, neg_extra in tqdm(train_loader):
                loss = loss_func.get_loss(
                    pos_item, neg_item, item_weight, user_id, pos_extra, neg_extra)
                total_loss += loss.item()
                total_step += 1

                optimizer.zero_grad()
                loss.backward()
                optimizer.step()

            # Guard against an empty loader so we never divide by zero.
            avg_loss = total_loss / total_step if total_step else float('nan')
            print(f'Epoch: {epoch}, Loss: {avg_loss}')

        if save_model:
            torch.save(self.mf_model.state_dict(), 'BPR/model_dat/model.pkl')


if __name__ == '__main__':
    # Assemble the training configuration: data location, optimization
    # settings, model size, and the extra-feature options.
    config = dict(
        dataset_path='datasets/lib.txt',
        batch_size=128,
        epochs=10,
        embed_dim=32,
        apply_weight=True,
        neg_sample_cnt=1000,
        epsilon=0.1,
        extra_dim=4,
        apply_extra=True,
        extra_weight=0.01,
    )

    Trainer(config).fit()
