"""
    Generate dataset for PreRec and TransFM.
    Files should be stored under raw_data/
    Format:
    title@code@school@ISBN@sid@name@date
"""

import json
import os
import re

# Hyper Parameters
# Minimum number of distinct books a user must borrow before being registered.
min_user_cnt = 1
# Minimum number of borrow events a book needs before being registered.
min_book_cnt = 1
# Directory holding the raw '@'-separated borrow records, taken from config.
# Use a context manager + explicit encoding (the original leaked the handle).
with open(os.path.join('config', 'conf.json'), 'rt', encoding='utf-8') as _conf_file:
    raw_data_path = json.load(_conf_file)['raw_data']

def dict_with_int_key(x: dict) -> dict:
    """Return a copy of mapping *x* with every top-level key cast to int.

    JSON object keys are always strings on disk, so id-keyed mappings
    loaded back from the dataset files need their keys restored to int.
    """
    return {int(k): v for k, v in x.items()}

# Leading alphanumeric prefix of a call number, used as the book "type".
# Compiled once instead of per input line.
_TYPE_PATTERN = re.compile(r'[0-9A-Za-z]*')


def _load_json(path):
    """Return the parsed JSON at *path*, or None when the file is absent.

    Opens with explicit utf-8 and closes the handle promptly (the original
    used bare ``json.load(open(...))`` and leaked the file object).
    """
    if not os.path.exists(path):
        return None
    with open(path, 'rt', encoding='utf-8') as f:
        return json.load(f)


def _dump_json(obj, path):
    """Write *obj* to *path* as JSON without ASCII-escaping, closing the file."""
    with open(path, 'wt', encoding='utf-8') as f:
        json.dump(obj, f, ensure_ascii=False)


def generate_dataset():
    """Build user/book/interaction datasets from the raw borrow records.

    Reads every file under ``raw_data_path`` (one '@'-separated record per
    line: title@code@school@ISBN@sid@name@date), registers users/books that
    reach the ``min_user_cnt`` / ``min_book_cnt`` thresholds, and writes the
    results under dataset/{user_data,book_data,interaction_data}.  Previously
    saved id mappings are reloaded first so ids stay stable across runs.
    """
    # User and Book dict (user_id <-> key)
    # user_list.json     user_id: (sid, name, school)
    loaded = _load_json('dataset/user_data/user_list.json')
    user_list = dict_with_int_key(loaded) if loaded is not None else {}
    # user_dict.json     sid: user_id
    loaded = _load_json('dataset/user_data/user_dict.json')
    user_dict = loaded if loaded is not None else {}
    # book_list.json     book_id: (book key, title, ISBN, code, type)
    loaded = _load_json('dataset/book_data/book_list.json')
    book_list = dict_with_int_key(loaded) if loaded is not None else {}
    # book_dict.json     book key: book_id
    book_dict = _load_json('dataset/book_data/book_dict.json')
    if book_dict is None:
        book_dict = {}
    else:
        print('Previous dict loaded.')
    # interaction_all.json   user_id: List(book_id, date)
    interaction_all = {}

    # Per-user set of distinct book keys / per-book borrow counts.
    user_cnt_dict = {}
    book_cnt_dict = {}

    # All parsed interactions; filtered later against the registered ids.
    temp_interactions = []

    # Counters for users/books newly registered during this run.
    added_user_cnt, added_book_cnt = 0, 0

    # Reading files in raw_data folder
    for file in os.listdir(raw_data_path):
        file_path = os.path.join(raw_data_path, file)
        print(f'Reading {file}...')
        with open(file_path, 'rt', encoding='utf-8') as f:
            for line in f:  # stream line-by-line instead of readlines()
                content = line.rstrip().split('@')
                if len(content) != 7:
                    continue  # malformed record
                title, code, school, ISBN, sid, name, date = content

                # Dates are expected as 8-char strings (presumably YYYYMMDD —
                # TODO confirm); every field must be non-empty.
                if len(date) != 8:
                    continue
                if any(not field for field in content):
                    continue
                cur_type = _TYPE_PATTERN.match(code).group()
                if not cur_type:
                    continue

                book_key = f'{title}@{ISBN}@{code}'

                # Skip duplicate (user, book) pairs.
                if sid in user_cnt_dict and book_key in user_cnt_dict[sid]:
                    continue

                temp_interactions.append((sid, book_key, date))

                user_books = user_cnt_dict.setdefault(sid, set())
                user_books.add(book_key)
                book_cnt_dict[book_key] = book_cnt_dict.get(book_key, 0) + 1

                # Register the user once the distinct-book threshold is reached.
                if len(user_books) >= min_user_cnt and sid not in user_dict:
                    user_id = len(user_list)
                    user_list[user_id] = {
                        "sid": sid,
                        "name": name,
                        "school": school
                    }
                    user_dict[sid] = user_id
                    added_user_cnt += 1

                # Register the book once the borrow-count threshold is reached.
                if book_cnt_dict[book_key] == min_book_cnt and book_key not in book_dict:
                    book_id = len(book_list)
                    book_list[book_id] = {
                        "book_key": book_key,
                        "title": title,
                        "ISBN": ISBN,
                        "code": code,
                        "type": cur_type
                    }
                    book_dict[book_key] = book_id
                    added_book_cnt += 1

    print('Generating data...')
    # Chronological order; list.sort is stable like sorted(), so ties keep
    # their original file order.
    temp_interactions.sort(key=lambda x: x[-1])

    for sid, book_key, date in temp_interactions:
        # Drop interactions whose user or book never got registered.
        if sid not in user_dict or book_key not in book_dict:
            continue

        user_id, book_id = user_dict[sid], book_dict[book_key]
        interaction_all.setdefault(user_id, []).append({
            "book_id": book_id,
            "date": date
        })

    # Save interaction results
    def save_interaction_result(interactions, name):
        """Dump *interactions* as JSON plus a flat '@'-separated txt file."""
        _dump_json(interactions, f'dataset/interaction_data/{name}.json')

        with open(f'dataset/interaction_data/{name}.txt', 'wt', encoding='utf-8') as f:
            for user_id, interact_seq in interactions.items():
                # User fields are invariant per sequence; hoist the lookup.
                user_data = user_list[user_id]
                sid, school = user_data['sid'], user_data['school']
                for interact in interact_seq:
                    book_data = book_list[interact['book_id']]
                    date = interact['date']
                    code, title, ISBN = book_data['code'], book_data['title'], book_data['ISBN']
                    f.write(f'{code}@{title}@{ISBN}@{sid}@{school}@{date}\n')

    # Create Folders
    os.makedirs('dataset/book_data', exist_ok=True)
    os.makedirs('dataset/user_data', exist_ok=True)
    os.makedirs('dataset/interaction_data', exist_ok=True)

    # Save results
    _dump_json(user_list, 'dataset/user_data/user_list.json')
    _dump_json(user_dict, 'dataset/user_data/user_dict.json')
    _dump_json(book_list, 'dataset/book_data/book_list.json')
    _dump_json(book_dict, 'dataset/book_data/book_dict.json')

    save_interaction_result(interaction_all, 'interaction_all')

    # Print results
    print(f'Finished. {len(user_list)} users, {len(book_list)} books in total')
    print(f'{added_book_cnt} books and {added_user_cnt} users are added in this process.')
