import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.contrib.slim.python.slim.learning import train_step
from fetchData import *
import os
import numpy as np
from tensorflow.python import debug as tf_debug
import tensorflow_hub as hub
import sys

#data_dir = "/home/jh/working_data/anti-spoofing/replay_attack/cropped_face_N_icbc_positive_REPLAY_ATTACK"

# Root directory of the OULU-NPU face anti-spoofing dataset,
# consumed by Oulu_NPU() in the main block below.
data_path = "/home/jh/working_data/anti-spoofing/Oulu_NPU"
# Output directory for the exported SavedModel.
# NOTE(review): the name says "mobileNet_035_128" but the script loads
# mobilenet_v2_140_224 — looks like a stale directory name; confirm.
export_path = './model_wholeScale_mobileNet_035_128'


if __name__ == "__main__":
    # Fine-tune a 2-way (real vs. spoof) classification head on top of a
    # frozen TF-Hub MobileNet-v2 feature extractor, using the OULU-NPU
    # dataset (protocol 'p4'), then export the result as a SavedModel.
    tf.logging.set_verbosity(tf.logging.INFO)
    # Pin the job to a single GPU; must be set before the Session is created.
    os.environ["CUDA_VISIBLE_DEVICES"] = "3"

    # Input pipelines: batches of 128 images plus integer labels.
    # NOTE(review): assumes trainDataStream/testDataStream yield 224x224x3
    # float images compatible with the hub module input — confirm in fetchData.
    oulu = Oulu_NPU( data_path )
    train_data_ops = oulu.trainDataStream( 128 , 'p4' , 'Train' , 1 )
    test_data_ops  = oulu.testDataStream ( 128 , 'p4' , 'Test' , 1 )

    sess = tf.Session()

    # Frozen MobileNet-v2 (width 1.40, 224x224) from a local TF-Hub cache;
    # trainable=False so only the classification head below is optimized.
    m = hub.Module( "/home/jh/working_data/models/tensorflow_hub/mobileNet_v2_140_224" ,
            trainable = False )

    imgs = tf.placeholder( tf.float32 , [None , 224, 224, 3 ] , name = "images" )
    imgs_features = m( imgs )
    label_placeholder = tf.placeholder( tf.int64 , [None] , name = "label" )

    # 2-unit head on the hub feature vector.
    # NOTE(review): slim.fully_connected applies its default activation
    # (ReLU), so `net` is not a plain linear logit layer and the exported
    # 'scores' output is post-activation — confirm this is intended.
    net = slim.fully_connected( imgs_features , 2 , scope = "final_fc" )

    loss = tf.losses.sparse_softmax_cross_entropy(
            labels = label_placeholder ,
            logits = net , scope = "loss" )
    loss_summary = tf.summary.scalar( "loss" , loss )

    # Batch accuracy: fraction of argmax predictions matching the labels.
    argmax = tf.argmax( net , 1 , name = "argmax" )
    acc = tf.reduce_mean(
            tf.cast( tf.equal ( label_placeholder , argmax ) , tf.float32 ) , name = "acc" )
    acc_summary = tf.summary.scalar( "accuracy" , acc )

    # Because the hub module was created with trainable=False, the trainable
    # variable list effectively contains only the final_fc head.
    trainable_list = tf.trainable_variables()
    train_op = tf.train.AdamOptimizer( learning_rate =
            0.0001 ).minimize( loss , var_list = trainable_list )

    summary_merged = tf.summary.merge( [ loss_summary , acc_summary ] )

    train_writer = tf.summary.FileWriter( './tflog/train_OULU_p4c1_mobileNet_140_224' , graph = tf.get_default_graph() )
    test_writer = tf.summary.FileWriter( './tflog/test_OULU_p4c1_mobileNet_140_224' )

    saver = tf.train.Saver()
    sess.run( tf.global_variables_initializer() )

    # FIX: the original never closed the session and only closed the summary
    # writers on the happy path; try/finally guarantees cleanup (and flushed
    # event files) even when training raises.
    try:
        for i in range( 10000 ):
            train_images , train_labels = sess.run( train_data_ops )
            _ , ACC , LOSS, SUMMARY = sess.run(
                    [train_op , acc , loss , summary_merged ] ,
                    feed_dict = { imgs: train_images ,
                    label_placeholder : train_labels } )

            train_writer.add_summary( SUMMARY , i )
            print( "iter = %d , loss = %f "  %( i , LOSS ) )

            # Periodic checkpoint so training can resume after a crash.
            if i% 200 == 0:
                save_path = saver.save(sess, "./tmp_OULU_p4c1_mobileNet_140_224/model.ckpt")
                print("Model saved in path: %s" % save_path)

            # Periodic evaluation on a single held-out batch.
            if i%50 == 0:
                test_images , test_labels = sess.run( test_data_ops )

                ACC , LOSS , SUMMARY = sess.run(
                        [ acc , loss , summary_merged ] ,
                        feed_dict = { imgs: test_images ,
                        label_placeholder : test_labels } )

                test_writer.add_summary( SUMMARY , i )
                print( "accuracy = %f" % ACC )

        #
        # export the final model as a SavedModel for serving
        #

        builder = tf.saved_model.builder.SavedModelBuilder(export_path)
        tensor_info_input  = tf.saved_model.utils.build_tensor_info( imgs )
        tensor_info_output = tf.saved_model.utils.build_tensor_info( net )

        # Serving signature: 'images' placeholder in, head output as 'scores'.
        prediction_signature = (
                tf.saved_model.signature_def_utils.build_signature_def(
                    inputs  = { 'images' : tensor_info_input } ,
                    outputs = { 'scores' : tensor_info_output} ,
                    method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME))

        builder.add_meta_graph_and_variables(
                sess, [tf.saved_model.tag_constants.SERVING],
                signature_def_map={
                    'predict_images':
                    prediction_signature,
                    },
                # hub modules may register lookup tables; initialize them
                # when the SavedModel is loaded for serving.
                main_op=tf.tables_initializer(),
                strip_default_attrs=True)
        builder.save()
    finally:
        train_writer.close()
        test_writer.close()
        sess.close()
