# -*- coding: utf-8 -*-

import argparse
import os
from cpg.utils import set_basic_log_config, strtobool
from cpg.cpg_task import CPGTaskSetting, CPGTask
from cpg.cpg_helper import aggregate_metric_epoch, generate_poetry, evaluate_model_on_set

# Explicitly pin the checkpoint epoch used when training is skipped;
# set to None to let aggregate_metric_epoch pick the best epoch instead.
BEST_EPOCH = 87
# BEST_EPOCH = None

# Default data / experiment directories (relative paths for the app layout).
DATAPATH = './Data'
EXPPATH = './Exps'
# Alternative absolute paths for the pangchaoxu server deployment:
# DATAPATH = '/data/pangchaoxu/poetry_generation/Data/ccpc'
# EXPPATH = '/data/pangchaoxu/poetry_generation/Exps'

# Configure root logging once at import time.
set_basic_log_config()

def parse_args(in_args=None):
    """Parse command-line options for the poetry-generation driver.

    Args:
        in_args: optional list of argument strings; when None, argparse
            falls back to ``sys.argv[1:]``.

    Returns:
        argparse.Namespace with the parsed options.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--task_name', type=str, required=True,
                            help='Task name')  # fixed typo: was 'Take Name'
    arg_parser.add_argument('--data_dir', type=str, default=DATAPATH,
                            help='Data directory')
    arg_parser.add_argument('--exp_dir', type=str, default=EXPPATH,
                            help='Experiment directory')
    arg_parser.add_argument('--save_cpt_flag', type=strtobool, default=True,
                            help='Whether to save cpt for each epoch')
    arg_parser.add_argument('--skip_train', type=strtobool, default=False,
                            help='Whether to skip training')
    arg_parser.add_argument('--no_cuda', type=strtobool, default=False,
                            help='Whether to use cuda')
    arg_parser.add_argument('--gen_mode', type=str, default='prefix',
                            help='Generation strategy: prefix or head')  # fixed typo: 'startegy'
    arg_parser.add_argument('--prefix', type=str, default='秋',
                            help='Offer the prefix for generation')
    arg_parser.add_argument('--head', type=str, default='江河湖海',
                            help='Offer the head for generation')
    arg_parser.add_argument('--beam_size', type=int, default=1,
                            help='Beam size for generating poem')
    arg_parser.add_argument('--model_name', type=str, default='Transfm',
                            help='choose the model from Transfm, LSTM')
    arg_parser.add_argument('--trainable_pe', type=strtobool, default=False,
                            help='whether need trainable positional embedding')
    arg_parser.add_argument('--format', type=int, default=7,
                            help='五言或者七言')
    arg_info = arg_parser.parse_args(args=in_args)
    return arg_info


if __name__ == '__main__':
    in_argv = parse_args()
    task_dir = os.path.join(in_argv.exp_dir, in_argv.task_name)
    # exist_ok=True already tolerates an existing directory; the previous
    # os.path.exists() pre-check was redundant (and race-prone).
    os.makedirs(task_dir, exist_ok=True)

    in_argv.model_dir = os.path.join(task_dir, "Model")
    in_argv.output_dir = os.path.join(task_dir, "Output")
    # CPGTaskSetting expects 'data_dir', 'model_dir', 'output_dir' among the kwargs.
    cpg_setting = CPGTaskSetting(**in_argv.__dict__)

    if not in_argv.skip_train:
        # Training run: persist the setting, load train/dev/test data, train.
        cpg_setting.dump_to(task_dir)
        cpg_task = CPGTask(cpg_setting, load_train=True, load_dev_test=True)
        cpg_task.train(save_cpt_flag=in_argv.save_cpt_flag)
        aggregate_metric_epoch(in_argv.output_dir, save=False)
    else:
        # Inference run: restore the persisted setting, skip data loading.
        cpg_setting.resume_setting_from(task_dir)
        cpg_task = CPGTask(cpg_setting, load_train=False, load_dev_test=False)

        cpg_task.logging('Skip training')
        cpg_task.model.eval()

        # BUGFIX: the old `BEST_EPOCH or aggregate_metric_epoch(...)` would
        # ignore an explicitly configured epoch 0; test against None instead.
        if BEST_EPOCH is not None:
            best_epoch = BEST_EPOCH
        else:
            best_epoch = aggregate_metric_epoch(
                in_argv.output_dir, model_type=cpg_task.setting.model_name)
        cpg_task.resume_cpt_at(best_epoch)

        # Only the LSTM path runs with batch_first=False — presumably it
        # consumes (seq, batch)-ordered tensors; TODO confirm in cpg_task.
        batch_first = cpg_task.setting.model_name != 'LSTM'

        # evaluate_model_on_set(cpg_task.model, batch_first, in_argv.task_name)

        # TODO generation parameter five / seven, put the new model under ./Exps/format_transformer
        # TODO add new param (in the cpg_task) for 3 positional embeddings, such that the original model can also run in the CPGTask.
        # TODO then evaluate the LSTM, vanilla TF, Format TF on a test set {5/7, prefix/head}*5.

        # '<f>' starts a five-character poem, '<s>' a seven-character one.
        start_token = '<f>' if in_argv.format == 5 else '<s>'
        print(cpg_task.model.generate_poetry(batch_first=batch_first,
                                             mode=in_argv.gen_mode,
                                             prefix=in_argv.prefix,
                                             head=in_argv.head,
                                             beam_size=5,
                                             start_token=start_token))