import tensorflow as tf
from tensorflow import saved_model as sm
from nets.nets_factory import get_network_fn, arg_scopes_map
import os
import numpy as np
import cv2

slim = tf.contrib.slim
np.set_printoptions(threshold=np.inf)


print("***********************************************************************************************")

# Command-line flags. Defaults mirror the training run this script exports;
# flag names and default values are part of the script's CLI and are unchanged.
tf.compat.v1.app.flags.DEFINE_string('checkpoint_dir', './my-data/43k_train/', "Directory where to read training checkpoints.")
tf.compat.v1.app.flags.DEFINE_string('output_dir', './serving_models/', "Directory where to export inference model.")
tf.compat.v1.app.flags.DEFINE_integer('model_version', 1, "Version number of the model.")
tf.compat.v1.app.flags.DEFINE_integer('classes_num', 3, "Number of classes the classifier predicts.")
tf.compat.v1.app.flags.DEFINE_integer('image_size', 299, "Needs to provide same value as in training.")
tf.compat.v1.app.flags.DEFINE_string('ckpt_file', 'model.ckpt-171126', "Checkpoint file name inside checkpoint_dir to restore.")
tf.compat.v1.app.flags.DEFINE_string('labels_file', './labels.txt', 'the path of labels_file: 0:normal ...')
tf.compat.v1.app.flags.DEFINE_string('model_name', 'inception_v4', "Name of the network architecture to export.")
tf.compat.v1.app.flags.DEFINE_string('signature_key', 'porn_detect', "Key under which the prediction signature is registered.")

FLAGS = tf.compat.v1.app.flags.FLAGS

print("***********************************************************************************************")

# Sample image used by get_img_tensor() / test_local_client() for a smoke test.
img_file = 'E:/datasets/1154681822/2020-05-26_06-37-30.500426_1_normal.jpeg'


def get_img_tensor(read_t=0):
    """Load `img_file` and return it as a uint8 array of shape [1, 299, 299, 3].

    Args:
        read_t: when falsy (default), decode with OpenCV (numpy-only path);
            otherwise decode/resize with TensorFlow ops and evaluate them
            in a temporary session.

    Returns:
        A numpy uint8 array with a leading batch dimension, RGB channel order.

    Raises:
        FileNotFoundError: if OpenCV cannot read `img_file` (cv2.imread
            returns None silently instead of raising).
    """
    if not read_t:
        imgc = cv2.imread(img_file)
        if imgc is None:
            # Fail loudly here instead of with a cryptic error in cv2.resize.
            raise FileNotFoundError('cannot read image: %s' % img_file)
        imgc = cv2.resize(imgc, (299, 299))  # still BGR at this point
        imgc = cv2.cvtColor(imgc, cv2.COLOR_BGR2RGB)
        result = imgc.reshape([1, 299, 299, 3])
    else:
        img_read_op = tf.io.read_file(img_file)
        img = tf.image.decode_jpeg(img_read_op, channels=3)  # uint8
        # tf.image.resize yields float32; add the batch dim then cast back.
        img = tf.expand_dims(tf.image.resize(img, [299, 299]), 0)
        img = tf.cast(img, tf.uint8)
        # Use a context manager so the session is always closed (the
        # original leaked it).
        with tf.compat.v1.Session() as sess:
            result = sess.run(img)
    return result

# NOTE(review): disabled reference snippet kept as a module-level string.
# It shows how to load the same checkpoint directly with slim instead of
# exporting a SavedModel. It references `inception` and
# `inception_v4_arg_scope`, which are not imported in this file, so it
# cannot run as-is.
'''
#使用slim 加载 ckpt

with tf.Graph().as_default() as g:
    with slim.arg_scope(inception_v4_arg_scope()):
        input_op = tf.placeholder(dtype=tf.float32,shape=[1,299,299,3],name='input')
        logits, end_points = inception.inception_v4(input_op, num_classes=3,is_training=False)

        probs = tf.nn.softmax(logits)
        config = tf.ConfigProto(
                gpu_options={
                    'allow_growth': 1,
                    # 'per_process_gpu_memory_fraction': 0.01
                },
                allow_soft_placement=True,
                log_device_placement=False,
            )

        img = get_img_tensor()
        init_fn = slim.assign_from_checkpoint_fn(
                     os.path.join('./my-data/43k_train/', 'model.ckpt-171126'), slim.get_model_variables("InceptionV4"))
        print('开始会话')
        with tf.Session(config=config) as sess:
            init_fn(sess)
            input = sess.graph.get_tensor_by_name('input:0')
            out = sess.graph.get_tensor_by_name('InceptionV4/Logits/Predictions:0')
            y = sess.run(out,feed_dict={input:img})
            # print(y)
'''


def preprocess_image(image_buffer):
    """Inception-style eval preprocessing for a single image tensor.

    Converts the image to float32 in [0, 1), takes the central 87.5% crop,
    resizes back to 299x299 with bilinear interpolation, and rescales
    pixel values to [-1, 1).
    """
    # convert_image_dtype only rescales when the input is an integer type;
    # a float input passes through unchanged.
    img = tf.image.convert_image_dtype(image_buffer, dtype=tf.float32)
    # The central crop shrinks the spatial size, hence the resize below.
    img = tf.image.central_crop(img, central_fraction=0.875)
    # resize_bilinear expects a batch dimension: add it, resize, strip it.
    img = tf.expand_dims(img, 0)
    img = tf.image.resize_bilinear(img, [299, 299], align_corners=False)
    img = tf.squeeze(img, [0])
    # Map [0, 1) onto [-1, 1).
    img = tf.subtract(img, 0.5)
    img = tf.multiply(img, 2.0)
    return img


def _load_label_names(labels_file):
    """Parse a labels file of `index:name` lines into an index-ordered list.

    Blank lines are skipped; `split(':', 1)` tolerates label names that
    themselves contain a colon. The result is ordered by the parsed class
    index rather than by file line order.
    """
    names = {}  # e.g. {0: 'normal', 1: 'porn', 2: 'sexy'}
    with open(labels_file, 'r') as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            i, label = line.split(':', 1)
            names[int(i)] = label.strip()
    return [names[i] for i in sorted(names)]


def export():
    """Build the inference graph, restore the checkpoint, export a SavedModel.

    The exported signature (registered under FLAGS.signature_key) maps:
        inputs:  'images'  -> uint8 placeholder [1, image_size, image_size, 3]
        outputs: 'scores'  -> softmax probabilities over FLAGS.classes_num classes
                 'classes' -> top-1 class name (string lookup table)

    After saving, test_local_client() is run as a smoke test.
    """
    with tf.Graph().as_default():
        # index -> class-name lookup, used to turn top-k indices into labels.
        names_tensor = tf.constant(_load_label_names(FLAGS.labels_file))
        names_lookup_table = tf.contrib.lookup.index_to_string_table_from_tensor(names_tensor)

        # Fixed-size uint8 batch input; preprocessing is mapped over the batch.
        jpegs = tf.placeholder(tf.uint8, shape=[1, FLAGS.image_size, FLAGS.image_size, 3], name='input')
        img = tf.map_fn(preprocess_image, jpegs, dtype=tf.float32)

        net_fn = get_network_fn(FLAGS.model_name, FLAGS.classes_num, is_training=False)
        with slim.arg_scope(arg_scopes_map[FLAGS.model_name]()):
            logits, end_points = net_fn(img)
            probs = tf.nn.softmax(logits)

            topk_probs, topk_indices = tf.nn.top_k(probs, 1)
            topk_names = names_lookup_table.lookup(tf.to_int64(topk_indices))

            with tf.Session() as sess:
                saver = tf.train.Saver()
                # get_checkpoint_state returns None when no checkpoint exists;
                # the restore below uses FLAGS.ckpt_file explicitly either way.
                ckpt = tf.train.get_checkpoint_state(FLAGS.checkpoint_dir)
                if ckpt is not None:
                    print(ckpt.model_checkpoint_path)
                saver.restore(sess, os.path.join(FLAGS.checkpoint_dir, FLAGS.ckpt_file))

                builder = tf.saved_model.builder.SavedModelBuilder(
                    os.path.join(FLAGS.output_dir, str(FLAGS.model_version)))

                i = tf.saved_model.utils.build_tensor_info(jpegs)
                o = tf.saved_model.utils.build_tensor_info(probs)
                c = tf.saved_model.utils.build_tensor_info(topk_names)

                prediction_signature = (
                    tf.saved_model.signature_def_utils.build_signature_def(
                        inputs={'images': i},
                        outputs={'scores': o,
                                 'classes': c},
                        method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME))

                # The string lookup table must be initialized when the
                # SavedModel is loaded, hence the legacy init op.
                legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')
                builder.add_meta_graph_and_variables(
                    sess, [tf.saved_model.tag_constants.SERVING],
                    signature_def_map={FLAGS.signature_key: prediction_signature},
                    legacy_init_op=legacy_init_op)
                builder.save()
                print('Successfully exported model to %s' % FLAGS.output_dir)
                print("start test model ....")
                test_local_client(sess)


def test_local_client(sess=None):
    """Reload the exported SavedModel and run one smoke-test inference.

    Args:
        sess: optional session to load the SavedModel into. When None, a
            temporary session is created and closed when done (the original
            leaked it).
    """
    owns_session = sess is None
    if owns_session:
        sess = tf.Session()
    try:
        model_path = os.path.join(FLAGS.output_dir, str(FLAGS.model_version))
        print(model_path)
        meta_graph_def = tf.saved_model.loader.load(sess, [sm.tag_constants.SERVING], model_path)
        signature = meta_graph_def.signature_def
        # sess.run accepts tensor names directly, so the names taken from
        # the signature can be used as fetches / feed keys as-is.
        images = signature[FLAGS.signature_key].inputs['images'].name
        classes = signature[FLAGS.signature_key].outputs['classes'].name
        scores = signature[FLAGS.signature_key].outputs['scores'].name
        print(sess.run([scores, classes], feed_dict={images: get_img_tensor()}))
        print('测试结果没毛病。。。。')
    finally:
        if owns_session:
            sess.close()


def main(_):
    """Entry point for tf.app.run: build and export the serving model."""
    export()




if __name__ == '__main__':
    # Use the compat.v1 entry point for consistency with the flag
    # definitions above (the bare tf.app alias is removed in TF2).
    tf.compat.v1.app.run()