from argparse import ArgumentParser
import os
import shutil
import sys
import tempfile

import cv2
import numpy as np
import tensorflow as tf

from data_util.streamer import Oulu_NPU, OneLabelData, HomeMadeData, REPLAY_ATTACK
from data_util.parser import *

def evaluate_stream( ds , model_dir , niter = 1 ):
    """
    Evaluate the latest checkpoint in model_dir against a data stream.

    ds: a pair of tensors (images, labels) that can be sess.run() many times
    model_dir: a directory containing a meta graph and checkpoint files
    niter: number of batches to pull from the stream and evaluate

    return: a dict with keys 'acc', 'FAR', 'FRR', 'HTER', each mapping to a
            list of per-batch values; an empty dict when no checkpoint exists
            (a dict in both cases, so callers can always iterate .items()).
    """
    sess = tf.Session()

    ckpt_path = tf.train.latest_checkpoint( model_dir )
    if ckpt_path is None:   # was `== None`; identity test is the Python idiom
        sess.close()
        # was `return []` — inconsistent with the dict returned below and
        # would crash callers that iterate .items()
        return {}

    saver = tf.train.import_meta_graph( ckpt_path + '.meta' )
    saver.restore( sess , ckpt_path )

    graph = tf.get_default_graph()
    inputs = graph.get_tensor_by_name( "images:0" )
    labels_placeholder = graph.get_tensor_by_name( "label:0" )

    # evaluation tensors exported by the training graph
    acc = graph.get_tensor_by_name( "acc:0" )
    argmax = graph.get_tensor_by_name( "argmax:0" )

    d = { 'acc': [], 'FAR': [], 'FRR': [], 'HTER': [] }

    for _ in range( niter ):
        imgs, labels = sess.run( ds )
        ACC, ARGMAX = sess.run( [acc, argmax] ,
                feed_dict = { inputs: imgs , labels_placeholder: labels } )

        preds = np.asarray( ARGMAX )
        labs = np.asarray( labels )

        # vectorized confusion-matrix counts (label 1 = positive class),
        # equivalent to the original per-sample loop for binary argmax
        TP = int( np.sum( (preds == 1) & (labs == 1) ) )
        TN = int( np.sum( (preds == 0) & (labs == 0) ) )
        FP = int( np.sum( (preds == 1) & (labs != 1) ) )
        FN = int( np.sum( (preds == 0) & (labs != 0) ) )

        # float() guards against Python 2 integer division, which would
        # silently truncate both rates to 0
        FAR = float( FP ) / ( FP + TN ) if ( FP + TN ) else 0.0
        FRR = float( FN ) / ( TP + FN ) if ( TP + FN ) else 0.0
        HTER = ( FAR + FRR ) / 2.0

        d['acc'].append( ACC )
        d['FAR'].append( FAR )
        d['FRR'].append( FRR )
        d['HTER'].append( HTER )

    sess.close()   # the original leaked the session
    return d

def test_stream_acc( sess , dataStream , model_dir , niter = 1 ):
    """
    Run niter batches from dataStream through the latest checkpoint in
    model_dir and collect the graph's accuracy value for each batch.

    sess: an open tf.Session owned by the caller
    dataStream: a pair of tensors (images, labels) sess.run()-able many times
    model_dir: directory containing a meta graph and checkpoint files
    niter: number of batches to evaluate

    return: list of per-batch accuracy scalars
    """
    # derive the meta path from the latest checkpoint (consistent with
    # evaluate_stream) instead of hard-coding "model.ckpt.meta", which
    # breaks when checkpoints are step-suffixed (e.g. model.ckpt-1000)
    ckpt_path = tf.train.latest_checkpoint( model_dir )
    saver = tf.train.import_meta_graph( ckpt_path + '.meta' )
    saver.restore( sess , ckpt_path )

    graph = tf.get_default_graph()

    # hub modules export the input under this scoped name
    inputs = graph.get_tensor_by_name( "module/hub_input/images:0" )
    labels_placeholder = graph.get_tensor_by_name( "label:0" )

    acc = graph.get_tensor_by_name( "acc:0" )
    acc_list = []

    for _ in range( niter ):
        imgs , labels = sess.run( dataStream )
        # run the tensor directly: the original `sess.run([acc,])` wrapped
        # every accuracy in a one-element list
        ACC = sess.run( acc ,
                feed_dict = { inputs : imgs ,
                labels_placeholder : labels } )

        acc_list.append( ACC )

    return acc_list

def write_testFailure( dataStream , model_dir , niter , write_dir ):
    """
    Classify niter batches from dataStream and dump every image as a JPEG
    into write_dir, split into failureCase/ (misclassified, named fp_* for
    false positives and fn_* for false negatives) and succeedCase/.

    The predicted-class probability is appended to each file name.
    write_dir is deleted and recreated on every call.
    """
    sess = tf.Session()

    # start from a clean output tree
    if os.path.exists( write_dir ):
        shutil.rmtree( write_dir )

    os.makedirs( write_dir )

    failure_dir = os.path.join( write_dir, 'failureCase' )
    succeed_dir = os.path.join( write_dir, 'succeedCase' )
    os.mkdir( failure_dir )
    os.mkdir( succeed_dir )

    saver = tf.train.import_meta_graph( os.path.join( model_dir , "model.ckpt.meta") )
    saver.restore( sess , tf.train.latest_checkpoint( model_dir ) )

    graph = tf.get_default_graph()

    inputs = graph.get_tensor_by_name( "module/hub_input/images:0" )
    labels_placeholder = graph.get_tensor_by_name( "label:0" )

    relu_out = graph.get_tensor_by_name( "final_fc/Relu:0")
    probability = tf.nn.softmax( relu_out )
    img_argmax = graph.get_tensor_by_name( "argmax:0" )

    for _ in range( niter ):
        imgs , labels = sess.run( dataStream )
        PRO, IMG_ARGMAX = sess.run( [ probability , img_argmax ] ,
                feed_dict = { inputs : imgs } )

        for i in range( labels.shape[0] ):
            # failure cases: fp_ = spoof predicted on a genuine sample,
            # fn_ = genuine predicted on a spoof sample
            if IMG_ARGMAX[i] != labels[i]:
                if labels[i] == 0:
                    img_name = tempfile.mktemp( prefix = "fp_",
                            suffix = ".jpg" , dir = failure_dir )
                else:
                    img_name = tempfile.mktemp( prefix = "fn_",
                            suffix = ".jpg" , dir = failure_dir )

            # succeed cases
            else:
                img_name = tempfile.mktemp(
                        suffix = ".jpg" , dir = succeed_dir )

            pro = str( PRO[i][IMG_ARGMAX[i]] )

            # scale [0,1] floats to [0,255]; the original multiplied by 256,
            # so a pixel value of 1.0 became 256 and wrapped to 0 as uint8
            img_uint8 = np.clip( 255.0 * imgs[i] , 0 , 255 ).astype( np.uint8 )
            # presumably the stream yields RGB while cv2 writes BGR — the
            # original converted with COLOR_BGR2RGB, which is the same swap
            img_uint8 = cv2.cvtColor( img_uint8 , cv2.COLOR_BGR2RGB )

            cv2.imwrite( img_name[:-4] + '_' + pro + '.jpg', img_uint8 )

    sess.close()   # the original leaked the session
    
def preprocessing( sess , img_name , height , width ):
    """Load a JPEG from disk and return it as a float32 batch of one.

    The file is decoded as a 3-channel image, converted to [0, 1] floats,
    resized to (height, width) and reshaped to [1, height, width, 3].

    NOTE(review): each call adds new ops to the default graph — fine for a
    handful of images, but the graph grows on every invocation.
    """
    raw = tf.read_file( img_name )
    decoded = tf.image.decode_jpeg( raw , channels = 3 )
    as_float = tf.image.convert_image_dtype( decoded , tf.float32 )
    resized = tf.image.resize_images( as_float , [ height , width ] )
    batched = tf.reshape( resized , [ 1 , height , width , 3 ] )

    return sess.run( batched )

def imgs_infer( sess , imgs , model_dir , height , width ):
    """Classify a list of image files with the latest checkpoint in model_dir.

    sess: an open tf.Session owned by the caller
    imgs: iterable of image file paths
    model_dir: directory holding model.ckpt.meta and checkpoint files
    height, width: spatial size each image is resized to before inference

    return: (argmax_list, confidence_list) — the predicted class array and
            the softmax probability of that class, one entry per input image
    """
    saver = tf.train.import_meta_graph( os.path.join( model_dir , "model.ckpt.meta") )
    saver.restore( sess , tf.train.latest_checkpoint( model_dir ) )

    graph = tf.get_default_graph()

    input_tensor = graph.get_tensor_by_name( "module/hub_input/images:0" )
    logits = graph.get_tensor_by_name( "final_fc/Relu:0" )
    softmax_op = tf.nn.softmax( logits )
    argmax_op = graph.get_tensor_by_name( "argmax:0" )

    argmax_list = []
    confidence_list = []

    for img_path in imgs:
        batch = preprocessing( sess , img_path , height , width )

        pred , probs = sess.run( [ argmax_op , softmax_op ] ,
                feed_dict = { input_tensor : batch } )

        argmax_list.append( pred )
        # probability assigned to the predicted class of the single image
        confidence_list.append( probs[0][ pred[0] ] )

    return argmax_list , confidence_list

if __name__ == "__main__":
    os.environ["CUDA_VISIBLE_DEVICES"] = "0"
    tf.logging.set_verbosity(tf.logging.INFO)

    # Alternative datasets used in earlier experiments (switch by editing
    # data_dir and the streamer class below):
    #   Oulu_NPU       -> .../anti-spoofing/Oulu_NPU
    #   REPLAY_ATTACK  -> .../anti-spoofing/replay_attack/original_REPLAY_ATTACK/replayattack
    #   OneLabelData   -> single-label subsets (label = 0 or 1)
    data_dir = "/home/jh/working_data/anti-spoofing/inTime99_madeData/test"
    model_dir = "./model_conv_1"

    # NOTE: the original also created an unused tf.Session() here;
    # evaluate_stream opens and owns its own session, so it was removed.
    data = HomeMadeData( data_dir )
    data_ops = data.testDataStream( 128 )

    # average metrics over 10 batches of 128
    d = evaluate_stream( data_ops , model_dir , 10 )

    for key, value in d.items():
        print( key )
        print( np.array(value).mean() )

        print( '\n' )