import torch
from Trainer import Trainer
from Model import MF
from FileLoader import FileLoader
import time


class Recommender:
    """Two-stage MF recommender: a precomputed coarse candidate list per
    user, optionally re-ranked on demand with fresh extra-feature input.

    Expects ``hyper_params`` to contain at least ``item_cnt``, ``user_cnt``,
    ``batch_size`` and ``extra_dim``; ``coarse_count`` is optional
    (default 500).
    """

    def __init__(self, hyper_params):
        self.hyper_params = hyper_params
        self.file_loader = FileLoader(hyper_params)
        self.mf_model = MF(hyper_params)

        try:
            self.mf_model.load_state_dict(torch.load('model_dat/model.pkl'))
            print('Model loaded')
        except Exception as err:
            # Best-effort load: any failure (missing file, stale state dict)
            # falls back to training from scratch and saving a new checkpoint.
            print(err)

            trainer = Trainer(hyper_params)
            trainer.mf_model = self.mf_model
            trainer.fit(save_model=True)

        # Embedding weight matrices, assumed (user_cnt, dim) / (item_cnt, dim)
        # based on how they are sliced and multiplied below.
        self.item_embed = self.mf_model.item_embedding.weight
        self.user_embed = self.mf_model.user_embedding.weight

        # Generate coarse recommendation results
        self.coarse_recommend()

    def coarse_recommend(self):
        """Precompute, for every user, the ``coarse_count`` highest-scoring
        items by raw embedding dot product; stored in ``self.coarse_result``
        as a (user_cnt, coarse_count) LongTensor of item ids.
        """
        item_cnt = self.hyper_params['item_cnt']
        user_cnt = self.hyper_params['user_cnt']
        # Clamp so the preallocated result width always matches what the
        # argsort slice below can actually deliver (at most item_cnt columns).
        coarse_count = min(self.hyper_params.get('coarse_count', 500), item_cnt)

        self.coarse_result = torch.empty(
            [user_cnt, coarse_count], dtype=torch.long)

        batch_size = self.hyper_params['batch_size']
        # Ceil division: the original floor division produced 0 batches when
        # user_cnt < batch_size, leaving coarse_result uninitialized.
        batch_cnt = (user_cnt + batch_size - 1) // batch_size

        # Pure inference over Parameter weights — no autograd graph needed.
        with torch.no_grad():
            for i in range(batch_cnt):
                left = i * batch_size
                right = min(left + batch_size, user_cnt)

                # (batch, dim) @ (dim, item_cnt) -> (batch, item_cnt) scores;
                # avoids materializing a (batch, item_cnt, dim) expanded tensor.
                mf_result = self.user_embed[left:right] @ self.item_embed.t()
                self.coarse_result[left:right] = torch.argsort(
                    mf_result, dim=-1, descending=True)[:, :coarse_count]

    def fine_recommend(self, user_id, extra_info=None, fast_recommend=True):
        """Rank items for ``user_id`` using the model's user embedding
        (which may incorporate ``extra_info``).

        With ``fast_recommend`` only the precomputed coarse candidates are
        re-ranked; otherwise all items are scored and the top
        ``coarse_result.shape[1]`` ids are returned. Returns a list of
        item ids, best first.
        """
        if extra_info is None:
            extra_info = torch.zeros(
                [1, self.hyper_params['extra_dim']], dtype=torch.float32)

        with torch.no_grad():
            user_embed = self.mf_model.get_user_embed(
                torch.LongTensor([user_id]), extra_info)

            if fast_recommend:
                candidates = self.coarse_result[user_id]
                item_embed = self.mf_model.get_item_embed(candidates)

                # (1, dim) broadcasts against (k, dim); no explicit expand.
                order = torch.argsort(
                    torch.sum(user_embed * item_embed, dim=-1),
                    descending=True)
                result = candidates[order]
            else:
                order = torch.argsort(
                    torch.sum(user_embed * self.item_embed, dim=-1),
                    descending=True)
                result = order[:self.coarse_result.shape[1]]

        return result.tolist()


if __name__ == '__main__':
    # Training / recommendation configuration for a quick smoke run.
    config = {
        'dataset_path': 'datasets/lib.txt',
        'batch_size': 128,
        'epochs': 2,
        'embed_dim': 32,
        'apply_weight': True,
        'neg_sample_cnt': 1000,
        'epsilon': 0.1,
        'extra_dim': 4,
        'apply_extra': True,
        'extra_weight': 0.01,
        'coarse_count': 500,
    }

    engine = Recommender(config)

    # Time a single fast (coarse-candidate) recommendation for user 0.
    tic = time.time()
    top_items = engine.fine_recommend(0, fast_recommend=True)
    print(f'Time: {time.time() - tic}')

    print(top_items)