import torch
import torch.nn as nn
from torch.autograd import Variable

import json
import os
import pickle
import h5py
import numpy as np
from Tools.eval_detection import ANETdetection

from Model.origin_tcn import TCN_Class2
from Tools.utils import sliding_window_aggregation_func,gen_json,nms_all
from Config.Config import ACTNET200V13_PKL,TSNscore_FEATURE_PATH,TEST_PROPOSALS_PKL,TRAIN_PROPOSALS_PKL,VAL_PROPOSALS_PKL,ACTNET200V13_JSON
from DataSet.Dataset_TSNscore_Class import Dataset_TSNscore_Class
from DataSet.Dataset_TSNscore import load_TSNscore_from_file,extract_TSN_feature

def classify_video(gt, feature_path='/home/DATASETS/actnet/tsn_score', subset='validation'):
    """Build a video-level classification score vector from cached TSN scores.

    For every video in *gt* that belongs to *subset* and has a saved RGB
    score file, the RGB ('feat') and optical-flow ('flow') 'fg' arrays are
    summed, aggregated over sliding windows, and a fixed 0 is prepended
    (background slot).

    Args:
        gt: dict mapping video id -> annotation dict with a 'subset' key.
        feature_path: root directory containing 'feat/<vid>.h5' and
            'flow/<vid>.h5' score files.
        subset: dataset split to process ('validation', 'testing', ...).

    Returns:
        dict mapping video id -> 1-D aggregated score array.
    """
    scores = {}
    processed = 0
    for vid, info in gt.items():
        rgb_file = feature_path + '/feat/%s.h5' % vid
        # Skip videos from other splits or without extracted features.
        if info['subset'] != subset or not os.path.exists(rgb_file):
            continue

        # Sum the RGB and flow per-snippet foreground scores.
        with h5py.File(rgb_file, 'r') as hf:
            combined = np.asarray(hf['fg'])
        with h5py.File(feature_path + '/flow/%s.h5' % vid, 'r') as hf:
            combined = combined + np.asarray(hf['fg'])

        aggregated = sliding_window_aggregation_func(combined[:, np.newaxis, :], norm=False)
        # Slot 0 is reserved for background and pinned to score 0.
        scores[vid] = np.hstack((0, aggregated))

        if processed % 100 == 0:
            print('classify video {}...'.format(processed))
        processed += 1

    return scores

##########################################################

# Directory where all artifacts of this experiment are written.
SAVE_DIR                  = '/mnt/md1/Experiments/PYTSN_Test2'
# Pickle of class-agnostic proposals, already ranked by actionness.
# NOTE(review): this is an absolute path, so os.path.join(SAVE_DIR, ...)
# applied to it later discards SAVE_DIR — works, but the join is redundant.
RANKED_PROPOSALS_PKL      = '/mnt/md1/Experiments/PYTSN_Test2/ranked_proposals.pkl'
# SUBSET                    = 'validation'
SUBSET                    = 'testing'
# Trained TCN_Class2 checkpoint used to classify each proposal.
CKPT                      = '/mnt/md1/Experiments/PYTSN_Test2/class2_model_11.ckpt'
# Run NMS on the ranked proposals before classification...
OPEN_NMS_FOR_RANKED_PROPOSAL = True
# ...and on the classified proposals afterwards.
OPEN_NMS_FOR_CLASS_PROPOSAL  = True

# Root directory of the pre-extracted TSN score features.
FEATURE = TSNscore_FEATURE_PATH

##########################################################

# Load the ranked proposals and optionally suppress near-duplicates.
with open(os.path.join(SAVE_DIR,RANKED_PROPOSALS_PKL),'rb') as f:
    ranked_proposals = pickle.load(f)

if OPEN_NMS_FOR_RANKED_PROPOSAL:
    # Keep at most the top 20 proposals per video after NMS at IoU 0.45.
    ranked_proposals = nms_all(ranked_proposals, topK=20, nms_thor=0.45)

# Ground-truth database: 'actionIDs' maps action name -> info dict (its
# 'class' key holds the numeric id, see the id2name construction below);
# 'database' maps video id -> annotations ('subset', 'numf', 'duration', ...).
with open(ACTNET200V13_PKL, 'rb') as f:
    gt = pickle.load(f)
    names = gt['actionIDs']
    gt = gt['database']

# NOTE(review): train_proposals is loaded but never used in this script —
# looks like a leftover; confirm before removing.
with open(TRAIN_PROPOSALS_PKL, 'rb') as f:
    train_proposals = pickle.load(f)

# Proposal classifier. The DataParallel wrapper must be applied before
# load_state_dict so the 'module.'-prefixed checkpoint keys match.
model = TCN_Class2().cuda()
model = nn.DataParallel(model)
model.load_state_dict(torch.load(CKPT))
print('Load model successfully!')
model.eval()

# Video-level TSN classification scores used as a prior over 201 classes.
# Either recompute them (commented-out lines) or load the cached pickle.
# score_tsn = classify_video(gt, subset=SUBSET, feature_path=FEATURE)
# with open(os.path.join(SAVE_DIR,'score_tcn.pkl'),'wb') as f:
#     pickle.dump(score_tsn,f)

with open(os.path.join(SAVE_DIR,'score_tcn.pkl'),'rb') as f:
    score_tsn = pickle.load(f)

# Priors fused into each per-proposal prediction, with matching weights.
priors = [score_tsn]
weights = [1]

cnt = 0
lastvid = None  # NOTE(review): never read below — leftover variable?
classified_proposals = dict()

# Assign an action class to every ranked proposal of every video.
for vid,proposal in ranked_proposals.items():

    # Skip videos without saved TSN scores or with no proposals at all.
    if not os.path.exists( FEATURE + '/feat/%s.h5' % vid) or len(proposal)==0:
        continue
    feat = load_TSNscore_from_file(vid)

    num_frame = gt[vid]['numf']
    duration = gt[vid]['duration']
    proposal = np.asarray(proposal)
    classified_proposals[vid] = []

    for i in range(proposal.shape[0]):

        # Copy so the source array is not mutated; column 3 holds the
        # class label and is reset to background (0) first.
        current = np.copy(proposal[i,...])
        current[3] = 0

        # Pool the TSN scores over the proposal's temporal extent.
        x = extract_TSN_feature(feat, current[np.newaxis,:], num_frame, duration, PYRAMID=[100000], repeat=False)

        # extract_TSN_feature can return None; the proposal then keeps
        # its background label.
        if x is not None:

            # Second-order (Gram-matrix) feature of the pooled scores.
            x = np.dot(x.T,x).flatten()
            x = Variable(torch.from_numpy(x)).cuda().float()
            x = x.view(1,-1)
            prob = model(x)

            # Fuse the model output with the weighted video-level priors
            # (each prior is reshaped to a 1x201 score vector).
            prob = prob.cpu().data.numpy()
            for prior, weight in zip(priors, weights):
                prior = prior[vid].reshape(1,201) * weight
                prob += prior
            pred = prob.argmax(1)
            # NOTE(review): pred is a length-1 array; writing it into a
            # scalar slot relies on NumPy's implicit size-1 conversion,
            # which newer NumPy versions deprecate — confirm.
            current[3] = pred

        classified_proposals[vid].append(current)

    if cnt%100 == 0 :
        print('evaling {} ... '.format(cnt))
    cnt += 1

# Persist the classified proposals so later runs can skip the loop above
# (see the commented-out reload just below).
with open(os.path.join(SAVE_DIR,'classified_proposals.pkl'),'wb') as f:
    pickle.dump(classified_proposals,f)

# with open(os.path.join(SAVE_DIR,'classified_proposals.pkl'),'rb') as f:
#     classified_proposals=pickle.load(f)

if OPEN_NMS_FOR_CLASS_PROPOSAL:
    # nms_thor=1 presumably disables overlap suppression, so this pass
    # only drops background-labelled proposals and truncates to top 20
    # — confirm against nms_all's implementation.
    classified_proposals = nms_all(classified_proposals, topK=20, nms_thor=1, remove_background=True)

# test on offical evaluation code
# Map numeric class ids back to action names for the submission JSON.
id2name = {}
for name, ids in names.items():
    id2name[ids['class']] = name
output = gen_json(classified_proposals, id2name)
PREDICT_JSON = os.path.join(SAVE_DIR,'prediction.json')
with open(PREDICT_JSON, 'w') as f:
    json.dump(output, f)

# Detection mAP at tIoU 0.5 / 0.75 / 0.95 — only meaningful on the
# validation split, where ground truth is available.
if SUBSET == 'validation':
    eval1 = ANETdetection(ACTNET200V13_JSON, PREDICT_JSON, subset='validation', tiou_thr=0.5, verbose=True, check_status=False)
    eval1.evaluate()
    eval2 = ANETdetection(ACTNET200V13_JSON, PREDICT_JSON, subset='validation', tiou_thr=0.75, verbose=True, check_status=False)
    eval2.evaluate()
    eval3 = ANETdetection(ACTNET200V13_JSON, PREDICT_JSON, subset='validation', tiou_thr=0.95, verbose=True, check_status=False)
    eval3.evaluate()
