import torch
import torch.utils.data as Data
import json
import os
from pathlib import Path
import jieba
import pickle
from typing import List, Tuple, Set
import random
from TransFM.utils import get_date_str, get_sem_str
import time


# Default hyper-parameters for dataset construction.
hyper_params = dict(
    prerec_result='TransFM/model_dat/prerec_result.pkl',  # pre-recommendation candidate tensor
    data_path='datasets/',                                # root of raw JSON data
    max_title_len=10,                                     # title truncated to this many words
)

class BookUserInfo:
    """Shared lookup tables and feature tensors for books and users.

    Loads raw JSON metadata and converts it into:
      * id dicts (title word, book type, school, grade, date, semester)
        persisted under ``TransFM/model_dat`` so ids stay stable across
        runs — unseen values are appended on every load;
      * long tensors indexed by book/user id for fast feature lookup.
    """

    # ---------- persistence helpers ----------

    @staticmethod
    def _load_pickle(path, default):
        """Return the unpickled object at *path*, or *default* when the file is absent."""
        if os.path.exists(path):
            with open(path, 'rb') as f:
                return pickle.load(f)
        return default

    @staticmethod
    def _save_pickle(obj, path):
        """Pickle *obj* to *path*, creating the parent directory if needed."""
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, 'wb') as f:
            pickle.dump(obj, f)

    @staticmethod
    def _load_json(path):
        """Load a JSON file, closing the handle (the original leaked it)."""
        with open(path, 'rt') as f:
            return json.load(f)

    # ---------- construction ----------

    def __init__(self, hyper_params):
        print('dataset: Initializing BookUserInfo model...')
        self.hyper_params = hyper_params

        # Load all book and user data from file
        data_path = Path(hyper_params['data_path'])
        self.book_list = self._load_json(data_path / 'book_data' / 'book_list.json')
        self.user_list = self._load_json(data_path / 'user_data' / 'user_list.json')
        self.interact_list = self._load_json(data_path / 'interaction_data' / 'interaction_all.json')

        # Book-related tensors (title words, book type)
        self.title_tensor = self.__title_to_tensor()
        self.book_type_tensor = self.__book_type_tensor()

        # Date / semester id dicts and per-user history lengths
        self.date_dict, self.sem_dict, self.user_len_tensor = self.__interact_related_info()

        # User-related tensors (school, grade)
        self.user_school_tensor, self.user_grade_tensor = self.__user_info_tensor()
        print('dataset: BookUserInfo model initialized.')

    def update_hyper_params(self):
        """Record all vocabulary sizes in ``self.hyper_params`` and return it.

        Must be called after ``__init__`` so every dict/count exists; the
        counts are presumably used downstream to size embedding tables.
        """
        self.hyper_params['book_cnt'] = len(self.book_list)
        self.hyper_params['user_cnt'] = len(self.user_list)
        self.hyper_params['type_cnt'] = len(self.book_type_dict)
        self.hyper_params['school_cnt'] = len(self.user_school_dict)
        self.hyper_params['grade_cnt'] = len(self.user_grade_dict)
        self.hyper_params['word_cnt'] = self.word_cnt
        self.hyper_params['date_cnt'] = len(self.date_dict)
        self.hyper_params['sem_cnt'] = len(self.sem_dict)

        return self.hyper_params

    # ---------- feature builders ----------

    def __interact_related_info(self) -> Tuple[dict, dict, torch.Tensor]:
        """Build date/semester id dicts and the per-user history-length tensor.

        Returns:
            date_dict: date string -> contiguous id (persisted).
            sem_dict: semester string -> contiguous id (persisted).
            user_len_tensor: ``(user_cnt,)`` long tensor of interaction counts.

        (The original annotation claimed two tensors; the function actually
        returns two dicts and one tensor.)
        """
        print('dataset: Adding interact related info...')
        # Resume ids assigned in previous runs; key '0' at index 0 acts as padding.
        date_dict = self._load_pickle('TransFM/model_dat/date_dict.pkl', {'0': 0})
        sem_dict = self._load_pickle('TransFM/model_dat/sem_dict.pkl', {'0': 0})

        user_len_tensor = torch.zeros([len(self.user_list)], dtype=torch.long)

        added_date_cnt, added_sem_cnt = 0, 0

        cur_date_str = time.strftime("%Y%m%d", time.localtime())

        for k, v in self.interact_list.items():
            sid = self.user_list[k]['sid']
            for hist in v:
                date = hist['date']
                date_str = get_date_str(date)
                if date_str not in date_dict:
                    date_dict[date_str] = len(date_dict)
                    added_date_cnt += 1

                sem_str = get_sem_str(date, sid)
                if sem_str not in sem_dict:
                    sem_dict[sem_str] = len(sem_dict)
                    added_sem_cnt += 1

            # Also register today's date/semester so inference-time lookups
            # (TestDataset.__user_date_dict) can never miss.
            date_str = get_date_str(cur_date_str)
            if date_str not in date_dict:
                date_dict[date_str] = len(date_dict)
                added_date_cnt += 1
            sem_str = get_sem_str(cur_date_str, sid)
            if sem_str not in sem_dict:
                sem_dict[sem_str] = len(sem_dict)
                added_sem_cnt += 1

            # Here, train_len = total_len (history is not split)
            user_len_tensor[int(k)] = len(v)

        # Persist the (possibly extended) dicts
        self._save_pickle(date_dict, 'TransFM/model_dat/date_dict.pkl')
        self._save_pickle(sem_dict, 'TransFM/model_dat/sem_dict.pkl')

        print(f'dataset: {added_date_cnt} dates and {added_sem_cnt} sem added.')

        return date_dict, sem_dict, user_len_tensor

    def __title_to_tensor(self) -> torch.Tensor:
        """Tokenize titles with jieba into a ``(book_cnt, max_title_len)``
        long tensor of word ids; id 0 (key ``None``) is padding.

        Side effect: sets ``self.word_cnt`` to the vocabulary size.
        """
        print('dataset: Title to tensor...')
        word_dict = self._load_pickle('TransFM/model_dat/title_word_dict.pkl', {None: 0})

        max_len = self.hyper_params['max_title_len']
        title_tensor = torch.zeros([len(self.book_list), max_len], dtype=torch.long)

        added_word_cnt = 0

        # Convert each title to a row of word ids
        for k, v in self.book_list.items():
            k = int(k)

            for i, word in enumerate(jieba.lcut(v['title'])):
                # Truncate BEFORE registering the word: the original added
                # the (max_len+1)-th word to the vocabulary even though it
                # is never referenced, inflating word_cnt for no benefit.
                if i >= max_len:
                    break

                if word not in word_dict:
                    word_dict[word] = len(word_dict)
                    added_word_cnt += 1

                title_tensor[k, i] = word_dict[word]

        self._save_pickle(word_dict, 'TransFM/model_dat/title_word_dict.pkl')

        self.word_cnt = len(word_dict)

        print(f'dataset: Title to tensor finished. {added_word_cnt} new words added.')

        return title_tensor

    def __book_type_tensor(self) -> torch.Tensor:
        """Map each book's ``type`` string to a contiguous id tensor.

        Side effect: sets ``self.book_type_dict`` (persisted).
        """
        self.book_type_dict = self._load_pickle('TransFM/model_dat/book_type_dict.pkl', {})

        book_type_tensor = torch.zeros([len(self.book_list)], dtype=torch.long)

        added_book_type_cnt = 0

        for k, v in self.book_list.items():
            k = int(k)

            if v['type'] not in self.book_type_dict:
                self.book_type_dict[v['type']] = len(self.book_type_dict)
                added_book_type_cnt += 1
            book_type_tensor[k] = self.book_type_dict[v['type']]

        self._save_pickle(self.book_type_dict, 'TransFM/model_dat/book_type_dict.pkl')
        print(f'dataset: {added_book_type_cnt} new book types added.')

        return book_type_tensor

    def __user_info_tensor(self) -> Tuple[torch.Tensor, torch.Tensor]:
        """Build ``(user_cnt,)`` id tensors for school and grade.

        Side effects: sets ``self.user_school_dict`` / ``self.user_grade_dict``
        (both persisted).
        """
        user_school_dict = self._load_pickle('TransFM/model_dat/user_school_dict.pkl', {})
        user_grade_dict = self._load_pickle('TransFM/model_dat/user_grade_dict.pkl', {})

        added_school_cnt, added_grade_cnt = 0, 0

        user_school_tensor = torch.zeros([len(self.user_list)], dtype=torch.long)
        user_grade_tensor = torch.zeros([len(self.user_list)], dtype=torch.long)

        for k, v in self.user_list.items():
            k = int(k)

            # Assumes a 12-char student id whose first 3 chars encode the
            # grade — TODO confirm; any other length falls back to grade '0'.
            user_grade = v['sid'][:3]
            if len(v['sid']) != 12:
                user_grade = '0'

            if v['school'] not in user_school_dict:
                user_school_dict[v['school']] = len(user_school_dict)
                added_school_cnt += 1
            if user_grade not in user_grade_dict:
                user_grade_dict[user_grade] = len(user_grade_dict)
                added_grade_cnt += 1

            user_school_tensor[k] = user_school_dict[v['school']]
            user_grade_tensor[k] = user_grade_dict[user_grade]

        self._save_pickle(user_school_dict, 'TransFM/model_dat/user_school_dict.pkl')
        self._save_pickle(user_grade_dict, 'TransFM/model_dat/user_grade_dict.pkl')

        self.user_school_dict = user_school_dict
        self.user_grade_dict = user_grade_dict

        print(f'dataset: {added_school_cnt} schools and {added_grade_cnt} grades added.')

        return user_school_tensor, user_grade_tensor


class TrainDataset(Data.Dataset):
    """BPR-style training dataset.

    Each item pairs one observed (positive) interaction with a uniformly
    sampled negative book the user has never interacted with.
    """

    def __init__(self, hyper_params, book_user_info: 'BookUserInfo'):
        # Original called super(TrainDataset).__init__(), which builds an
        # unbound super object and never initializes the base class.
        super().__init__()
        self.hyper_params = hyper_params

        print(f'dataset: Loading train data...')

        # Define data_path
        data_path = Path(hyper_params['data_path'])

        # Load interaction data (close the handle; the original leaked it)
        with open(data_path / 'interaction_data' / 'interaction_all.json', 'rt') as f:
            self.interact_dict = json.load(f)

        # Shared feature tensors / id dicts
        self.basic_info = book_user_info
        self.user_cnt = len(book_user_info.user_list)
        self.book_cnt = len(book_user_info.book_list)

        # Initialize history list and interact list
        self.history_list = self.__get_history_list()
        self.interact_list = self.__get_interact_list()
        self.interact_cnt = len(self.interact_list)

        # Finish
        print(f'dataset: train data loaded. {self.user_cnt} users, {self.book_cnt} books and {self.interact_cnt} interactions in total.')

    def __get_history_list(self) -> List[Set[int]]:
        """Per-user set of interacted book ids, used to reject negatives.

        Users absent from ``interact_dict`` get an empty set (the original
        left ``None`` there, which would crash negative sampling).
        """
        result: List[Set[int]] = [set() for _ in range(self.user_cnt)]

        for k, v in self.interact_dict.items():
            result[int(k)] = {hist['book_id'] for hist in v}

        return result

    def __get_interact_list(self) -> List[Tuple[int, int, int, int]]:
        """Flatten interactions into ``(user_id, book_id, date_id, sem_id)``.

        (Original annotations said 2-/3-tuples; 4-tuples are produced.)
        """
        print('dataset: getting interact list...')
        result: List[Tuple[int, int, int, int]] = []

        for k, v in self.interact_dict.items():
            sid = self.basic_info.user_list[k]['sid']
            user_id = int(k)

            for hist in v:
                date_str = get_date_str(hist['date'])
                date = self.basic_info.date_dict[date_str]
                # NOTE(review): BookUserInfo builds sem_dict from
                # get_sem_str(raw_date, sid), but here the formatted
                # date_str is passed — confirm get_sem_str accepts both,
                # otherwise this lookup can KeyError.
                sem_str = get_sem_str(date_str, sid)
                sem = self.basic_info.sem_dict[sem_str]
                result.append((user_id, hist['book_id'], date, sem))

        return result

    def __len__(self):
        """Number of (positive) interactions."""
        return self.interact_cnt

    def __getitem__(self, index):
        """
            Return: user_id, school, grade, user_len, date, sem,
                    pos_book_id, pos_title, pos_type,
                    neg_book_id, neg_title, neg_type
        """
        # Current positive interaction
        user_id, book_id, date, sem = self.interact_list[index]

        # User features
        user_school = self.basic_info.user_school_tensor[user_id]
        user_grade = self.basic_info.user_grade_tensor[user_id]
        user_len = self.basic_info.user_len_tensor[user_id]

        # Positive book features
        pos_title = self.basic_info.title_tensor[book_id]
        pos_type = self.basic_info.book_type_tensor[book_id]

        # Rejection-sample a negative book the user has not interacted with
        cur_hist = self.history_list[user_id]
        while True:
            neg_book_id = random.randint(0, self.book_cnt - 1)
            if neg_book_id not in cur_hist:
                break

        neg_title = self.basic_info.title_tensor[neg_book_id]
        neg_type = self.basic_info.book_type_tensor[neg_book_id]

        return (torch.tensor(user_id), user_school, user_grade, user_len,
                torch.tensor(date), torch.tensor(sem),
                torch.tensor(book_id), pos_title, pos_type,
                torch.tensor(neg_book_id), neg_title, neg_type)


class TestDataset(Data.Dataset):
    """Inference dataset.

    Pairs every user with each of their pre-recommended candidate books
    (one row of ``prerec_result`` per user), stamped with today's
    date/semester ids.
    """

    def __init__(self, hyper_params, book_user_info: 'BookUserInfo'):
        # Original called super(TestDataset).__init__(), which builds an
        # unbound super object and never initializes the base class.
        super().__init__()
        self.hyper_params = hyper_params

        print(f'dataset: Loading test data...')

        # Pre-recommendation candidates: tensor of book ids, one row per user.
        # torch.load accepts a path directly; opening the file ourselves
        # (as the original did) leaks the handle.
        self.prerec_result: torch.Tensor = torch.load(hyper_params['prerec_result'])

        # Shared feature tensors / id dicts
        self.basic_info = book_user_info
        self.user_cnt = len(book_user_info.user_list)
        self.book_cnt = len(book_user_info.book_list)

        # One sample per (user, candidate) pair
        self.interact_cnt = self.user_cnt * self.prerec_result.size(1)

        # Load user_date_sem
        self.user_date_sem = self.__user_date_dict()

        # Finish
        print(f'dataset: test data loaded. {self.user_cnt} users, {self.book_cnt} books and {self.interact_cnt} interactions in total.')

    def __user_date_dict(self):
        """Map each user id to today's ``(date_id, sem_id)`` pair."""
        user_date_sem = {}

        # Current date string, e.g. '20240131'
        cur_date_str = time.strftime("%Y%m%d", time.localtime())

        # Today's date id is the same for every user — hoist it out of the loop.
        date = self.basic_info.date_dict[get_date_str(cur_date_str)]

        for k in range(self.user_cnt):
            sem_str = get_sem_str(cur_date_str, self.basic_info.user_list[str(k)]['sid'])
            user_date_sem[k] = (date, self.basic_info.sem_dict[sem_str])

        return user_date_sem

    def __len__(self):
        """Total number of (user, candidate) pairs."""
        return self.interact_cnt

    def __getitem__(self, index):
        """
            Return: user_id, school, grade, user_len, date, sem,
                    book_id, book_title, book_type
        """
        # Decode (user, candidate-rank) from the flat index
        candidates_per_user = self.prerec_result.size(1)
        user_id = index // candidates_per_user
        book_id = self.prerec_result[user_id, index % candidates_per_user]

        # User features
        user_school = self.basic_info.user_school_tensor[user_id]
        user_grade = self.basic_info.user_grade_tensor[user_id]
        user_len = self.basic_info.user_len_tensor[user_id]

        # Candidate book features
        pos_title = self.basic_info.title_tensor[book_id]
        pos_type = self.basic_info.book_type_tensor[book_id]

        # Today's date and semester ids for this user
        date, sem = self.user_date_sem[user_id]

        return (torch.tensor(user_id), user_school, user_grade, user_len,
                torch.tensor(date), torch.tensor(sem),
                book_id, pos_title, pos_type)
