import time
from collections import Counter
from typing import List, Optional, Tuple

import torch

from BPR.DataLoader import DataLoader
from BPR.FileLoader import FileLoader
from BPR.Model import MF
from BPR.Trainer import Trainer


class Recommender:
    """BPR/MF-based book recommender.

    Loads a trained matrix-factorization model (training one from scratch
    if no checkpoint is available), precomputes per-user coarse candidate
    lists, and serves fine-grained rankings on top of them.
    """

    def __init__(self, hyper_params):
        """Build the recommender and precompute candidate lists.

        Args:
            hyper_params: dict of settings consumed by FileLoader / MF /
                DataLoader / Trainer. Must also provide 'item_cnt',
                'user_cnt', 'batch_size' and 'extra_dim' by the time
                coarse_recommend / fine_recommend run (presumably filled
                in by FileLoader — confirm against its implementation).
        """
        self.hyper_params = hyper_params
        self.file_loader = FileLoader(hyper_params)
        self.user_cnt = self.file_loader.reader_cnt
        self.mf_model = MF(hyper_params)
        self.data_loader = DataLoader(hyper_params)

        try:
            self.mf_model.load_state_dict(
                torch.load('BPR/model_dat/model.pkl'))
            print('Model loaded')
        except Exception as err:
            # No usable checkpoint: report why, then train a fresh model
            # and persist it for the next run.
            print(err)

            trainer = Trainer(hyper_params)
            trainer.mf_model = self.mf_model
            trainer.fit(save_model=True)

        self.item_embed = self.mf_model.item_embedding.weight
        self.user_embed = self.mf_model.user_embedding.weight

        # Bug fix: this assignment was commented out, but external code
        # (e.g. the __main__ demo) dereferences `recommender.id_to_book`,
        # which raised AttributeError. Re-expose the mapping.
        self.id_to_book = self.file_loader.id_to_book

        # Generate coarse recommendation results
        self.coarse_recommend()
        self.generate_hot_list()

    def generate_hot_list(self):
        """Rank books by raw popularity.

        Counts, over every user's history, interactions whose
        interaction_type == 0 (presumably 'borrow' — confirm against the
        interaction model) per book, then stores the book ids
        (self.hot_book_i) and their counts (self.hot_book_p) ordered from
        most to least popular.
        """
        counts = Counter(
            interact.item_id
            for history in self.file_loader.intereaction_list.values()
            for interact in history
            if interact.interaction_type == 0)

        # most_common() is sorted by count descending (stable for ties),
        # matching sorted(items, key=count, reverse=True).
        ranked = counts.most_common()
        self.hot_book_i = [item_id for item_id, _ in ranked]
        self.hot_book_p = [count for _, count in ranked]

    def coarse_recommend(self):
        """Precompute each user's top candidates by MF dot-product score.

        Fills self.coarse_result, a [user_cnt, coarse_count] LongTensor
        whose rows hold item ids sorted by descending score.
        """
        print('Recommender: Generating coarse recommendation...')
        coarse_count = self.hyper_params.get('coarse_count', 500)

        item_cnt = self.hyper_params['item_cnt']
        user_cnt = self.hyper_params['user_cnt']

        self.coarse_result = torch.empty(
            [user_cnt, coarse_count], dtype=torch.long)

        batch_size = self.hyper_params['batch_size']
        # Bug fix: ceil division. The original floor division yielded zero
        # batches whenever user_cnt < batch_size, leaving coarse_result
        # as uninitialized torch.empty garbage.
        batch_cnt = (user_cnt + batch_size - 1) // batch_size

        # Pure inference: no autograd bookkeeping needed.
        with torch.no_grad():
            for i in range(batch_cnt):
                left = i * batch_size
                right = min(left + batch_size, user_cnt)

                # Dot-product score of every item for this user batch,
                # then keep the coarse_count best item ids per user.
                cur_user = self.user_embed[left:right].unsqueeze(1).expand(
                    [-1, item_cnt, -1])
                mf_result = torch.sum(cur_user * self.item_embed, dim=-1)
                self.coarse_result[left:right] = torch.argsort(
                    mf_result, dim=-1, descending=True)[:, :coarse_count]

        print('Recommender: Finished.')

    def fine_recommend(self, user_id, extra_info=None, fast_recommend=True):
        """Return a ranked list of item ids for `user_id`.

        Args:
            user_id: integer user index.
            extra_info: optional [1, extra_dim] float tensor of side
                features; defaults to zeros.
            fast_recommend: when True, only re-rank the precomputed
                coarse candidates; when False, score the whole catalogue
                and keep the top coarse_count items.

        Returns:
            list[int] of item ids, best first.
        """
        if extra_info is None:
            extra_info = torch.zeros(
                [1, self.hyper_params['extra_dim']], dtype=torch.float32)

        user_embed = self.mf_model.get_user_embed(
            torch.LongTensor([user_id]), extra_info)

        if fast_recommend:
            # Re-rank only the precomputed coarse candidates.
            cur_coarse_result = self.coarse_result[user_id]
            item_embed = self.mf_model.get_item_embed(cur_coarse_result)
            user_embed = user_embed.expand([item_embed.shape[0], -1])

            mf_result = torch.argsort(
                torch.sum(user_embed * item_embed, dim=-1), descending=True)
            # Map positions within the candidate list back to item ids.
            result = cur_coarse_result[mf_result]
        else:
            # Exhaustive scoring over every item embedding.
            user_embed = user_embed.expand([self.item_embed.shape[0], -1])

            mf_result = torch.argsort(
                torch.sum(user_embed * self.item_embed, dim=-1),
                descending=True)
            result = mf_result[:self.coarse_result.shape[1]]

        return result.tolist()

    def get_recommend_p(self, user_id, extra_info=None) -> Tuple[List[float], List[int]]:
        """Score the user's coarse candidates.

        Returns:
            (scores, item_ids), both sorted by score descending, or
            (None, None) when user_id is out of range.
        """
        if extra_info is None:
            extra_info = torch.zeros(
                [1, self.hyper_params['extra_dim']], dtype=torch.float32)

        if user_id >= self.user_cnt:
            return None, None

        user_embed = self.mf_model.get_user_embed(
            torch.LongTensor([user_id]), extra_info)

        cur_coarse_result = self.coarse_result[user_id]
        item_embed = self.mf_model.get_item_embed(cur_coarse_result)
        user_embed = user_embed.expand([item_embed.shape[0], -1])

        score_result = torch.sum(user_embed * item_embed, dim=-1)
        p_result, i_result = torch.sort(score_result, descending=True)
        # Map sorted positions back to item ids.
        i_result = cur_coarse_result[i_result]

        return p_result.tolist(), i_result.tolist()

    def get_sorted_recommend_list(self, user_id, book_id_list,
                                  extra_info=None) -> Optional[List[int]]:
        """Sort `book_id_list` by the user's predicted preference.

        Returns the positions (indices into book_id_list, NOT book ids —
        callers must index book_id_list themselves) ordered best first,
        or None when user_id is out of range.

        Bug fix: the original annotation promised a
        Tuple[List[float], List[int]] but the success path only ever
        returned the index list, while the failure path returned a
        2-tuple (None, None) — inconsistent shapes for callers. Both
        paths now return a single value.
        """
        if extra_info is None:
            extra_info = torch.zeros(
                [1, self.hyper_params['extra_dim']], dtype=torch.float32)

        if user_id >= self.user_cnt:
            # Single sentinel, matching the single-value success path.
            return None

        user_embed = self.mf_model.get_user_embed(
            torch.LongTensor([user_id]), extra_info)

        book_id_tensor = torch.LongTensor(book_id_list)
        item_embed = self.mf_model.get_item_embed(book_id_tensor)
        user_embed = user_embed.expand([item_embed.shape[0], -1])

        score_result = torch.sum(user_embed * item_embed, dim=-1)
        i_result = torch.argsort(score_result, descending=True)

        return i_result.tolist()


if __name__ == '__main__':
    hyper_params = {
        'dataset_path': 'datasets/lib.txt',
        'batch_size': 128,
        'epochs': 2,
        'embed_dim': 32,
        'apply_weight': True,
        'neg_sample_cnt': 1000,
        'epsilon': 0.1,
        'extra_dim': 4,
        'apply_extra': True,
        'extra_weight': 0.01,
        'coarse_count': 500
    }

    recommender = Recommender(hyper_params)

    # Time a single fast (coarse-candidate) recommendation.
    start_time = time.time()
    user_id = 20
    result = recommender.fine_recommend(user_id, fast_recommend=True)
    print(f'Time: {time.time() - start_time}')

    # Print the top 30 titles, flagging books the user already borrowed.
    history = recommender.file_loader.intereaction_list[user_id]
    for i in range(30):
        borrowed = any(interact.item_id == result[i] for interact in history)
        # Bug fix: the original read `recommender.id_to_book`, but that
        # attribute is never set on Recommender (the assignment in
        # __init__ is commented out) — go through file_loader instead.
        print(recommender.file_loader.id_to_book[result[i]],
              '（已借阅）' if borrowed else '')
