# Replace the vanilla ReLU gradient with a guided-ReLU gradient to enable guided backpropagation.
import tensorflow as tf

from tensorflow.python.framework import ops
from tensorflow.python.ops import gen_nn_ops

@ops.RegisterGradient("GuidedRelu")
def _GuidedReluGrad(op, grad):
    """Gradient for the "GuidedRelu" op.

    Standard ReLU backprop (zero where the forward output was <= 0), but
    additionally zeroes out positions where the incoming gradient is
    negative — the guided-backpropagation rule.
    """
    relu_backprop = gen_nn_ops._relu_grad(grad, op.outputs[0])
    return tf.where(grad > 0., relu_backprop, tf.zeros_like(grad))


import tensorflow as tf
import numpy as np
from nets import resnet_v1
slim = tf.contrib.slim

import sys, os
from absl import app
from absl import flags

import utils
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'  # silence TensorFlow C++ logging ('3' = errors only)
class_num=764  # size of the classifier head — presumably dataset-specific; confirm against the checkpoint
image_size = 224  # input spatial resolution fed to resnet_v1_50
def preload(sess):
    """Build the ResNet-50 v1 inference + gradient graph and restore weights.

    Creates the image placeholder, the network (inference mode: batch norm
    frozen), the scalar objectives used for visualization, and their
    gradients, then initializes variables and restores the checkpoint into
    `sess`.

    Returns the tuple
    (prob, images, cost, target_conv_layer, y_c, target_conv_layer_grad,
     gb_grad, net) — the same order `predict()` expects its arguments in.

    NOTE(review): the "GuidedRelu" gradient registered at the top of this
    file is never activated here (no `tf.get_default_graph()
    .gradient_override_map({'Relu': 'GuidedRelu'})` around graph
    construction), so `tf.gradients` below appears to use the standard ReLU
    gradient — confirm whether that is intended.
    """
    # NHWC float placeholder for a batch of raw RGB images.
    images = tf.placeholder("float", [None,image_size, image_size, 3])
    preprocessed_images = utils.resnet_preprocess(images)
    with slim.arg_scope(resnet_v1.resnet_arg_scope()):
        with slim.arg_scope([slim.batch_norm], is_training=False):
            net, end_points = resnet_v1.resnet_v1_50(preprocessed_images, class_num, is_training=False)

            prob = end_points['predictions'] # after softmax
            print('prob:', prob)
            # NOTE(review): this is -sum(log(p) * log(p)), not the usual
            # cross-entropy -sum(y * log(p)); looks deliberate but confirm.
            cost = (-1) * tf.reduce_sum(tf.multiply(tf.log(prob), tf.log(prob)), axis=1)
            # Activations of the last block — the layer visualized by Grad-CAM.
            target_conv_layer = end_points['resnet_v1_50/block4/unit_2/bottleneck_v1']
            # target_conv_layer = end_points['resnet_v1_50/block3/unit_5/bottleneck_v1']

            # gradient for partial linearization. We only care about target visualization class. 
            # NOTE(review): weights logits by log-probabilities rather than a
            # one-hot target class; confirm this is the intended objective.
            y_c = tf.reduce_sum(tf.multiply(net, tf.log(prob)), axis=1)
            print('y_c:', y_c)
            target_conv_layer_grad = tf.gradients(y_c, target_conv_layer)[0]
            print('target_conv_layer_grad:', target_conv_layer_grad)

            # Guided backpropagtion back to input layer
            gb_grad = tf.gradients(cost, images)[0]
            

            init = tf.global_variables_initializer()

            # Hard-coded checkpoint path; must exist relative to the CWD.
            latest_checkpoint = "./resnet50/model.ckpt-152493"
            saver = tf.train.Saver()

        
    sess.run(init)
    saver.restore(sess, latest_checkpoint) 
    return prob,images,cost,target_conv_layer,y_c,target_conv_layer_grad,gb_grad,net;
def predict(image_dir,sess,prob,images,cost,target_conv_layer,y_c,target_conv_layer_grad,gb_grad,net):
    """Run the network on the space-separated image paths in `image_dir` and
    visualize per-image probabilities, Grad-CAM inputs, and gradients.

    Args:
        image_dir: space-separated string of .jpg file paths (validated by
            `checklist`).
        sess: an active tf.Session with the graph from `preload` restored.
        prob, images, cost, target_conv_layer, y_c, target_conv_layer_grad,
        gb_grad, net: the tensors returned by `preload` (`cost` is accepted
        for signature compatibility but not fetched here).

    Raises:
        Exception: from `checklist` if any path is missing or not a .jpg.
    """
    assert image_dir is not None
    dirlist = image_dir.split(' ')
    batch_size = checklist(dirlist)
    print(str(batch_size) + ' files detected')

    # Load every image and stack them in the SAME order as dirlist.
    # (BUGFIX: the previous code prepended each new image with
    # np.concatenate((img, img_v), 0), so with multiple files the batch was
    # reversed and results were matched to the wrong file names.)
    batch = [
        utils.load_image(imgdir, normalize=False, size=(image_size, image_size))
             .reshape((1, image_size, image_size, 3))
        for imgdir in dirlist
    ]
    img_v = np.concatenate(batch, 0)

    # Fetch everything in a single forward/backward pass instead of running
    # the graph twice (once for prob, once for the rest).
    prob_np, net_np, y_c_np, gb_grad_value, target_conv_layer_value, target_conv_layer_grad_value = sess.run(
        [prob, net, y_c, gb_grad, target_conv_layer, target_conv_layer_grad],
        feed_dict={images: img_v})

    for i in range(batch_size):
        utils.print_prob(prob_np[i], './labels.txt')
        utils.visualize(img_v[i], target_conv_layer_value[i], target_conv_layer_grad_value[i], gb_grad_value[i], i + 1, (image_size, image_size))
        # NOTE(review): "pause" is a Windows cmd builtin; this is a no-op /
        # error message on other platforms — confirm target OS.
        os.system("pause")
    print('finish')
def checklist(dirllist):
    """Validate that every entry in `dirllist` is an existing .jpg file.

    Returns the number of entries; raises Exception on the first entry that
    is missing or does not end in '.jpg'.
    """
    for path in dirllist:
        if os.path.isfile(path) and path.endswith('.jpg'):
            continue
        raise Exception(path+" does not exist or is not a jpg file")
    return len(dirllist)
        
        
        
if __name__ == "__main__":
    print("main")
    with tf.Session() as sess:
        # Build the graph and restore the checkpoint once, then serve an
        # interactive prompt until the user quits.
        tensors = preload(sess)
        while True:
            image_dir = input("please input your file dir or type 'quit':")
            if image_dir == 'quit':
                break
            if image_dir == '':
                continue
            # Validate the paths up front so a bad entry just re-prompts
            # instead of aborting the session.
            try:
                checklist(image_dir.split(' '))
            except Exception as e:
                print(e)
                continue
            # preload's return order matches predict's parameter order.
            predict(image_dir, sess, *tensors)
