#coding=utf-8
import argparse
import torch
import torch.nn as nn
from torch.utils.data import DataLoader

from dataset import Dictionary, GQAFeatureDataset_BP
import base_model
from train import train


## Parameter setting
def parse_args(argv=None):
    """Parse command-line arguments for GQA training.

    Args:
        argv: Optional list of argument strings. Defaults to None, in which
            case argparse reads from ``sys.argv`` (preserves the original
            behavior for script use); passing a list makes the function
            testable without touching the process arguments.

    Returns:
        argparse.Namespace with fields: epochs, num_hid, model, output,
        batch_size, seed.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--epochs', type=int, default=30)
    parser.add_argument('--num_hid', type=int, default=1024)
    parser.add_argument('--model', type=str, default='baseline0_newatt')
    parser.add_argument('--output', type=str, default='saved_models_gqa/exp0')
    parser.add_argument('--batch_size', type=int, default=512)
    parser.add_argument('--seed', type=int, default=1111, help='random seed')
    args = parser.parse_args(argv)
    return args


if __name__ == '__main__':
    args = parse_args()

    # Seed the CPU RNG for reproducibility.
    torch.manual_seed(args.seed)
    # Seed *all* visible GPUs, not just the current one: nn.DataParallel
    # (used below) replicates the model across every visible device, so
    # seeding only the current GPU leaves the replicas' RNGs unseeded.
    torch.cuda.manual_seed_all(args.seed)
    # NOTE(review): benchmark=True lets cuDNN auto-tune convolution
    # algorithms for speed, which can make runs non-deterministic despite
    # the seeding above — an accepted trade-off here.
    torch.backends.cudnn.benchmark = True

    # 'dictionary.pkl' holds the word<->id mappings used to tokenize questions.
    dictionary = Dictionary.load_from_file('../data_gqa/dictionary.pkl')
    # Train/val feature datasets sharing the same dictionary.
    train_dset = GQAFeatureDataset_BP('train', dictionary)
    eval_dset = GQAFeatureDataset_BP('val', dictionary)
    batch_size = args.batch_size

    # Resolve the model factory by name, e.g. base_model.build_baseline0_newatt,
    # then build the model and move it to the GPU.
    constructor = 'build_%s' % args.model
    model = getattr(base_model, constructor)(train_dset, args.num_hid).cuda()
    # Initialize the word-embedding layer from pretrained GloVe vectors.
    model.w_emb.init_embedding('../data_gqa/glove6b_init_300d.npy')

    # Wrap for multi-GPU data-parallel training.
    model = nn.DataParallel(model).cuda()

    # Shuffling loaders; default num_workers=0 loads in the main process.
    train_loader = DataLoader(train_dset, batch_size, shuffle=True)
    eval_loader = DataLoader(eval_dset, batch_size, shuffle=True)

    # Run the training loop, writing checkpoints/logs to args.output.
    train(model, train_loader, eval_loader, args.epochs, args.output, 'BP')
