# Generate the final output.
# Format: STU_ID;BOOK_ID1 BOOK_ID2 BOOK_ID3 ...
import time
import sqlite3
import json
import requests
from utils.log_output import simple_text_log
from tqdm import tqdm
from requests_toolbelt.multipart import encoder
from requests_toolbelt.multipart import decoder
import os
import pickle

time_str = time.strftime("%Y%m%d_%H%M%S", time.localtime())
output_dataset = f'result/DB_{time_str}.db'

hot_book_path = os.path.join('result', 'hot_books.txt')

class FinalOutput:
    """Build the final recommendation database.

    ``execute()`` writes a fresh timestamped SQLite file under ``result/``
    containing book/user metadata, hot-book counts, and the per-user
    recommendation lists produced by the training pipeline. The path of the
    file actually written is left in ``self.output_dataset``.
    """

    def __init__(self):
        # Module-level default path; execute() replaces it with a fresh
        # timestamped path so callers can locate the file that was written.
        self.output_dataset = output_dataset

    def execute(self):
        """Create the SQLite database and populate every table.

        Reads:
            dataset/book_data/book_list.json - {book_id: {ISBN, code, title}}
            dataset/user_data/user_list.json - {user_id: {sid, ...}}
            result/result.pkl                - recommendation lists indexed
                                               by user_id
            result/hot_books.txt             - lines of "book_id;count"

        Side effects:
            Writes the DB file named in self.output_dataset and logs progress.
        """
        # Fresh timestamp per run so successive runs never clobber each other.
        time_str = time.strftime("%Y%m%d_%H%M%S", time.localtime())
        self.output_dataset = f'result/DB_{time_str}.db'
        conn = sqlite3.connect(self.output_dataset)
        conn.execute(
            'CREATE TABLE book_info (book_id int primary key, isbn text, code text, title text)')
        conn.execute('CREATE TABLE book_rec (user_id int, book_id int)')
        conn.execute(
            'CREATE TABLE user_info (user_id int primary key, username text)')
        conn.execute(
            'CREATE TABLE name_to_user_id (username text primary key, user_id int)')
        conn.execute(
            'CREATE TABLE hot_book (book_id int primary key, count int)')
        conn.commit()

        print('Reading files...')

        simple_text_log('train', 'Generating database...')

        # Load inputs under context managers so handles are always closed.
        with open('dataset/book_data/book_list.json', 'rt') as f:
            book_list = json.load(f)
        with open('dataset/user_data/user_list.json', 'rt') as f:
            user_list = json.load(f)
        # NOTE(review): pickle of our own training output — assumed trusted;
        # never point this at externally supplied data.
        with open('result/result.pkl', 'rb') as f:
            rec_result = pickle.load(f)

        cursor = conn.cursor()
        for k, v in book_list.items():
            # '%' stripping matches the original data-cleaning step; quote
            # handling is now done by parameter binding instead of the
            # fragile manual "''" escaping (stored values are identical).
            cursor.execute(
                'INSERT INTO book_info VALUES (?, ?, ?, ?)',
                (int(k), v['ISBN'], v['code'], v['title'].replace('%', '')))
        conn.commit()

        for k, v in user_list.items():
            user_id = int(k)
            username = v['sid']
            cursor.execute(
                'INSERT INTO user_info VALUES (?, ?)', (user_id, username))
            cursor.execute(
                'INSERT INTO name_to_user_id VALUES (?, ?)', (username, user_id))
        conn.commit()

        # hot_books.txt: one "book_id;count" pair per line.
        with open(hot_book_path, 'rt') as f_hot_book:
            for line in f_hot_book:
                book_id, count = line.rstrip().split(';')
                cursor.execute(
                    'INSERT INTO hot_book VALUES (?, ?)',
                    (int(book_id), int(count)))
        conn.commit()

        print('Writing Recommendation...')
        # rec_result is positional: element i is the list for user_id i.
        for user_id, rec_list in enumerate(tqdm(rec_result)):
            cursor.executemany(
                'INSERT INTO book_rec VALUES (?, ?)',
                ((user_id, rec) for rec in rec_list))

        conn.commit()
        cursor.close()
        conn.close()

        print('Finished!')


def upload_db():
    """Generate the final DB, upload it to the target server, activate it
    there, then delete the local copy.

    Reads ``config/conf.json`` for ``db_upload_url``. Best-effort: a failed
    activation is logged via simple_text_log rather than raised.
    """
    with open(os.path.join('config', 'conf.json'), 'rt') as f:
        conf = json.load(f)
    target_server_url = conf['db_upload_url']

    final_output = FinalOutput()
    final_output.execute()

    # File name as seen by the server (execute() builds the path with '/').
    dataset_filename = final_output.output_dataset.split('/')[-1]

    simple_text_log('train', 'Uploading db...')
    session = requests.Session()
    # Send the database to the target server as a streamed multipart upload
    # so a large DB file does not have to fit in memory.
    with open(final_output.output_dataset, 'rb') as f:
        form = encoder.MultipartEncoder({
            "file": (dataset_filename, f, "application/octet-stream")
        }, boundary='----WebKitFromBoundaryFSDFDSJ')
        headers = {
            "Prefer": "respond-async",
            "Content-Type": form.content_type,
            "FileName": dataset_filename,
        }
        resp = session.post(target_server_url + 'upload_db',
                            headers=headers, data=form)
        print(resp)

    # Ask the server to switch over to the freshly uploaded database.
    result = requests.post(target_server_url + 'set_db',
                           json={'db_path': dataset_filename}).json()

    # The local copy is no longer needed once the server has it.
    os.remove(final_output.output_dataset)

    # Missing or falsy 'ok' both count as failure (same test as before).
    if not result.get('ok'):
        simple_text_log('error', 'Fail to upload db.')
    else:
        simple_text_log('train', 'DB uploaded.')
