import torch
import torch.nn as nn
from opt import parse_opt
from util.dataloader import SimDataset
from torch.utils.data import DataLoader
from model.vggnet import vgg_pool7
from model.biLstm import RNNEncoder,PhraseAttention
from model.nmn import NMN
import numpy as np
import os.path as osp
import json
import time

def main():
    """Parse options, build the dataset/model/optimizer, and launch training.

    Side effects: constructs a DataLoader over the validation split, optionally
    restores model weights from ``opt['load_ckpt']``, and hands everything to
    :func:`train`.
    """
    opt = vars(parse_opt())
    device = torch.device("cuda:%d" % opt['gpuid'] if torch.cuda.is_available() else "cpu")

    val_set = SimDataset(opt)
    dataloaders = {
        'val': DataLoader(val_set, batch_size=opt['batch_size'], shuffle=True, num_workers=0)
    }

    # NMN(opt, vis_dim, spatial_dim, word_vec_size); 512 and 5 presumably match
    # the VGG feature width and the 5-d spatial encoding — TODO confirm in model/nmn.
    model = NMN(opt, 512, 5, opt['word_vec_size'])
    if opt['load_ckpt']:
        if osp.exists(opt['load_ckpt']):
            t_ = time.time()
            # map_location='cpu' lets a GPU-saved checkpoint load on any host;
            # model.to(device) below moves the weights afterwards.
            model.load_state_dict(torch.load(opt['load_ckpt'], map_location='cpu'))
            # %.2f: elapsed time is a float; '%d' truncated sub-second loads to 0.
            print('load from %s in %.2f ' % (opt['load_ckpt'], time.time() - t_))
            print('base:', osp.basename(opt['load_ckpt']))
        else:
            print(opt['load_ckpt'], 'not exist')
    model.to(device)

    crit = nn.CrossEntropyLoss().to(device)
    optimizer = torch.optim.Adam(model.parameters(),
                                 lr=opt['learning_rate'],
                                 betas=(opt['optim_alpha'], opt['optim_beta']),
                                 eps=opt['optim_epsilon'])

    train(opt, device, dataloaders['val'], model, crit, optimizer)


def train(opt, device, dataloaders, model, loss_fn, optim):
    """Optimize ``model`` over ``dataloaders`` with ``loss_fn``.

    Prints batch accuracy every step and the loss every ``opt['print_step']``
    iterations. Once the iteration count exceeds ``opt['n_iter']``, saves a
    checkpoint plus a JSON loss history under ``opt['this_dir']/checkpoints``
    and stops.

    Args:
        opt: dict of parsed options (learning schedule, paths, step counts).
        device: torch.device to run on.
        dataloaders: an iterable DataLoader yielding dicts with keys
            'imcrop', 'spatial', 'text_seq', 'label'.
        model: the NMN module being trained.
        loss_fn: criterion applied to (score, label).
        optim: optimizer over model.parameters().
    """
    loss_history, info = {}, {}
    model.train()
    epoch, train_len = 0, len(dataloaders)
    for ii, input_data in enumerate(dataloaders):
        iters = epoch * train_len + ii
        # Observed shapes (from a sample batch):
        #   imcrop  (batch, n_regions, 3, 224, 224)
        #   spatial (batch, n_regions, 5)
        #   text_seq (batch, seq_len, 1)
        #   label   (batch, 1)
        imcrop = input_data['imcrop'].to(device)
        spatial = input_data['spatial'].to(device)
        text_seq = input_data['text_seq'].to(device)
        label = input_data['label'].to(device)

        score, attn_phrase = model(imcrop, spatial, text_seq)
        target = label.squeeze(1).long()

        # Batch accuracy, computed on-device (no numpy round trip needed).
        argmax = score.argmax(dim=1)
        print((argmax == target).float().mean().item())

        loss = loss_fn(score, target)
        loss_history[iters] = loss.item()
        if iters % opt['print_step'] == 0:
            print(iters, '=', loss.item())
            # Bug fix: the original `iters % opt['print_step']*10 == 0` parsed
            # as `(iters % print_step) * 10`, which is always 0 inside this
            # branch; parenthesize to fire every print_step*10 iterations.
            if iters % (opt['print_step'] * 10) == 0:
                attn_topk_idx = attn_phrase.size(1) - attn_phrase.topk(k=5, dim=1).indices
                print(attn_topk_idx)

        # Bug fix: gradients must be zeroed every step. The original called
        # optim.zero_grad() once before the loop, so gradients accumulated
        # across all iterations.
        optim.zero_grad()
        loss.backward()
        optim.step()

        if opt['n_iter'] < iters:
            checkpointpath_ = osp.join(opt['this_dir'], 'checkpoints',
                                       '%s%d%s' % ('iter_', opt['n_iter'], '.pth'))
            torch.save(model.state_dict(), checkpointpath_)
            print('model saved to %s' % checkpointpath_)
            info['opt'] = opt
            info['loss_history'] = loss_history
            json_name = '%s%d%s' % ('iter_', opt['n_iter'], '.json')
            f_ = ''
            if opt['load_ckpt']:
                f_ = osp.basename(opt['load_ckpt'])
            # Bug fixes: (1) `f_.join(json_name)` interleaved `f_` between
            # every character of json_name — prefix it instead; (2) json.dump
            # writes str, so the file must be opened in text mode, not 'wb'.
            with open(osp.join(opt['this_dir'], 'checkpoints', f_ + json_name), 'w') as io:
                json.dump(info, io)
            break



# Script entry point: run training only when executed directly, not on import.
if __name__ == '__main__':
    main()
