from pytorch_lightning import Trainer
from pytorch_lightning import seed_everything
from argparse import ArgumentParser
import torch
from tasks.ClassSummary.models.Seq2seq.model import Seq2seq
from tasks.ClassSummary.models.Seq2seq.data import Seq2seqDataModule
from config.Config import BASE_DIR
from pytorch_lightning.callbacks.early_stopping import EarlyStopping
from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint


def add_program_args(parser: ArgumentParser) -> None:
    """Register training-level CLI options on *parser*.

    Data-related options (e.g. --input_flag, --train, --val, --batch_size)
    are added separately by Seq2seqDataModule.add_data_args.

    Fix: --gpu_num and --seed previously had no `type=`, so CLI-supplied
    values arrived as *strings*: `seed_everything("1024")` is rejected by
    newer Lightning versions, and `Trainer(gpus="1")` is parsed as the GPU
    *id* string "1" instead of "use 1 GPU". Both now coerce to int, which
    is backward-compatible with the existing int defaults.
    """
    parser.add_argument("--code_field", help="代码词表文件", type=str,
                        default=str(BASE_DIR / "tasks/ClassSummary/resource/code.field"))
    parser.add_argument("--nl_field", help="自然语言词表文件", type=str,
                        default=str(BASE_DIR / "tasks/ClassSummary/resource/nl.field"))
    parser.add_argument("--patience", help="Early Stop patience", type=int,
                        default=4)
    parser.add_argument("--check_freq", help="check freq", type=int, default=1)
    parser.add_argument("--max_epoch", help="max epoch", type=int, default=100)
    parser.add_argument("--lr", help="learning rate", type=float, default=0.001)
    parser.add_argument("--max_grad", help="max_grad_norm", type=float, default=5)
    # type=int so Trainer(gpus=...) receives a GPU *count*, not an id string.
    parser.add_argument("--gpu_num", help="GPUS", type=int, default=0)
    # type=int so seed_everything receives an int even when set via CLI.
    parser.add_argument("--seed", help="random seed", type=int, default=1024)
    parser.add_argument("--save_path", help="save pkt file path", type=str, default="./model/seq2seq/")


def main(args):
    """Train a Seq2seq model and return the filesystem path of the best checkpoint.

    Args:
        args: parsed argparse namespace carrying both the program options
            (see ``add_program_args``) and the data options contributed by
            ``Seq2seqDataModule.add_data_args`` (input_flag, train, val,
            batch_size, ...).

    Returns:
        str: path of the checkpoint with the highest validation ``bleu``.
    """
    seed_everything(args.seed)

    # Both field files are torchtext-style Field objects serialized with
    # torch.save; their .vocab attributes drive the model's embeddings.
    code_field = torch.load(args.code_field)
    nl_field = torch.load(args.nl_field)

    model = Seq2seq(
        encoder_vocab=code_field.vocab,
        decoder_vocab=nl_field.vocab,
        lr=args.lr,
    )

    datamodule = Seq2seqDataModule(
        input_flag=args.input_flag,
        train_path=args.train,
        test_path=None,  # evaluation on the test split is not done here
        val_path=args.val,
        code_field=code_field,
        nl_field=nl_field,
        batch_size=args.batch_size,
    )

    # Both callbacks key off the validation BLEU score (higher is better).
    early_stop = EarlyStopping(
        monitor="bleu",
        min_delta=0.00,
        patience=args.patience,
        verbose=False,
        mode="max",
    )
    checkpoint = ModelCheckpoint(
        save_top_k=1,
        monitor="bleu",
        mode="max",
        dirpath=args.save_path,
        # Braces doubled so Lightning fills epoch/bleu at save time.
        filename=f"Seq2seq-{args.input_flag}-{{epoch:02d}}-{{bleu:.4f}}",
    )

    trainer = Trainer(
        check_val_every_n_epoch=args.check_freq,
        gpus=args.gpu_num,
        max_epochs=args.max_epoch,
        gradient_clip_val=args.max_grad,
        callbacks=[early_stop, checkpoint],
    )
    trainer.fit(model, datamodule=datamodule)
    return checkpoint.best_model_path


if __name__ == "__main__":
    # Combine program-level and data-level CLI options, train, and report
    # where the best checkpoint landed.
    parser = ArgumentParser()
    add_program_args(parser)
    Seq2seqDataModule.add_data_args(parser)
    print(main(parser.parse_args()))
