import os
import argparse
import os.path
import re
import sys
import tarfile

import numpy as np 
from six.moves import urllib
import tensorflow as tf 
#print(os.getcwd())




MODEL_FILE = "/home/cumtzd/9hw-source/models/research/slim/freezed_inceptionv3_car_136189.pb"
LABEL_FILE = "/home/cumtzd/tmp/pj_vechicle/labels.txt" 

class Nodelookup(object):
	"""Maps integer node IDs to human-readable label strings.

	The label file is expected to contain lines of the form
	``<integer id>:<human readable name>``; lines without a colon are
	skipped.
	"""

	def __init__(self, label_path=None):
		# Fall back to an empty mapping when no label file is given, so
		# later id_to_string() calls return '' instead of raising
		# AttributeError (the original never set self.node_lookup here).
		if not label_path:
			tf.logging.fatal('please specify the label file.')
			self.node_lookup = {}
			return
		self.node_lookup = self.load(label_path)

	def load(self, label_path):
		'''
		Loads a human readable English name for each softmax node.
		Args:
		    label_path: string path to an ``id:name`` label file
		        (read via tf.gfile, so non-local paths work too).
		Returns:
		    dict from integer node ID to human-readable string.
		'''
		if not tf.gfile.Exists(label_path):
			# BUG FIX: the original referenced the misspelled name
			# ``lable_path`` here, which raised NameError on this path.
			tf.logging.fatal('File does not exist %s', label_path)

		# Loads mapping from integer node ID to human-readable string.
		id_to_human = {}
		for line in tf.gfile.GFile(label_path).readlines():
			if line.find(':') < 0:
				continue
			# maxsplit=1 keeps label names that themselves contain ':'.
			_id, human = line.rstrip('\n').split(':', 1)
			id_to_human[int(_id)] = human

		return id_to_human

	def id_to_string(self, node_id):
		"""Return the label for node_id, or '' when the ID is unknown."""
		if node_id not in self.node_lookup:
			return ''
		return self.node_lookup[node_id]

def create_graph(model_file=None):
	"""Import a frozen GraphDef (.pb) into the default TensorFlow graph.

	Args:
	    model_file: path to a serialized GraphDef; when falsy, the
	        module-level MODEL_FILE is used.
	"""
	path = model_file or MODEL_FILE
	with open(path, 'rb') as pb_file:
		graph_def = tf.GraphDef()
		graph_def.ParseFromString(pb_file.read())
		tf.import_graph_def(graph_def, name='')


def run_inference_on_image(image, model_file=None):
	'''
	Runs inference on an image and prints the top prediction.
	Args:
	    image: Image file name (JPEG bytes are fed to the graph as-is).
	    model_file: optional path to a frozen GraphDef; falls back to
	        MODEL_FILE when not given.
	Returns:
	    (human_string, score, node_id) for the highest-scoring class,
	    or ('', 0.0, -1) if the prediction vector is empty.
	'''
	if not tf.gfile.Exists(image):
		tf.logging.fatal('File does not exist %s', image)
	# Context manager so the file handle is always closed
	# (the original leaked it).
	with open(image, 'rb') as f:
		image_data = f.read()

	# Creates graph from saved GraphDef.
	# BUG FIX: the original ignored the model_file argument and always
	# loaded MODEL_FILE; create_graph() itself applies that default.
	create_graph(model_file)

	with tf.Session() as sess:
		# 'output:0' is the softmax tensor of this frozen graph;
		# 'input:0' expects the raw JPEG-encoded string.
		softmax_tensor = sess.graph.get_tensor_by_name('output:0')
		predictions = sess.run(softmax_tensor, {'input:0': image_data})
		# predictions becomes a 1-D vector; element i is the confidence
		# of the class whose ID is i.
		predictions = np.squeeze(predictions)

		# Creates node ID --> English string lookup.
		node_lookup = Nodelookup(LABEL_FILE)

		# argsort() sorts ascending; [-1:] keeps only the single best
		# class and [::-1] would order a larger slice best-first.
		top_k = predictions.argsort()[-1:][::-1]

		# Defaults guard against a NameError in the return statement
		# when top_k is empty (the original crashed in that case).
		human_string, score, node_id = '', 0.0, -1
		for node_id in top_k:
			human_string = node_lookup.id_to_string(node_id)
			score = predictions[node_id]
			print('id:[%d] name:[%s] (score = %.5f)'%(node_id,human_string,score))

	return human_string, score, node_id

