# Python 2 script: classify a verification-code (captcha) image with a trained
# Caffe model, print the top predictions, and dump the network's layer shapes.
import numpy as np
import matplotlib.pyplot as plt
import caffe
import os
from PIL import Image
# set display defaults
plt.rcParams['figure.figsize'] = (10, 10)        # large images
plt.rcParams['image.interpolation'] = 'nearest'  # don't interpolate: show square pixels
plt.rcParams['image.cmap'] = 'gray'              # single-channel images render as grayscale

# Inference runs on the CPU; no GPU is assumed.
caffe.set_mode_cpu()
# Model artifacts are resolved relative to the current working directory.
caffe_root="./"
model_def = caffe_root + 'Deploy/deploy.prototxt'                 # network architecture (deploy-time prototxt)
model_weights = caffe_root + 'verify_code_iter_10000.caffemodel'  # trained weights (snapshot at 10k iterations)
# Original author's reference paths (kept for context):
# ./classification
# /Users/rogerluo/Desktop/pyopnecv/caffedemo/verifycode/Deploy/deploy.prototxt
# /Users/rogerluo/Desktop/pyopnecv/caffedemo/verifycode/verify_code_iter_10000.caffemodel
# /Users/rogerluo/Desktop/pyopnecv/caffedemo/verifycode/mean.binaryproto
# /Users/rogerluo/Desktop/pyopnecv/caffedemo/verifycode/Synset/synset_words.txt
# /Users/rogerluo/Desktop/pyopnecv/caffedemo/verifycode/Pic/1001-2-t.jpg

# NOTE(review): one-off conversion of mean.binaryproto -> .npy, kept commented
# out for reference; its output file is the one loaded further below.
# binary_mean ="/Users/rogerluo/Desktop/pyopnecv/caffedemo/verifycode/mean.binaryproto"
# net = caffe.Net(model_def,model_weights,caffe.TEST)
# binary_mean_npy = "/Users/rogerluo/Desktop/pyopnecv/caffedemo/verifycode/mean_binaryproto.npy"
# blob = caffe.proto.caffe_pb2.BlobProto()
# bin_mean = open( binary_mean , 'rb' ).read()
# print bin_mean
# blob.ParseFromString(bin_mean)
# arr = np.array( caffe.io.blobproto_to_array(blob) )
# npy_mean = arr[0]
# np.save( binary_mean_npy , npy_mean )

# Build the net in TEST phase (inference only: no dropout, no loss layers).
net = caffe.Net(model_def,model_weights,caffe.TEST)
# NOTE(review): absolute path -- breaks on any other machine; consider making
# it relative to caffe_root like the other artifacts.
binary_mean_npy = "/Users/rogerluo/Desktop/pyopnecv/caffedemo/verifycode/mean_binaryproto.npy"

# # load the mean ImageNet image (as distributed with Caffe) for subtraction
#/Users/rogerluo/Desktop/python/caffebuild/caffe/python/caffe/imagenet/ilsvrc_2012_mean.npy
mu = np.load(binary_mean_npy)
mu = mu.mean(1).mean(1)  # average over pixels to obtain the mean (BGR) pixel values
print 'mean-subtracted values:', zip('BGR', mu)

#
print net.blobs['data'].data.shape

transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
#
transformer.set_transpose('data', (2,0,1))  # move image channels to outermost dimension
transformer.set_mean('data', mu)            # subtract the dataset-mean value in each channel
transformer.set_raw_scale('data', 255)      # rescale from [0, 1] to [0, 255]
transformer.set_channel_swap('data', (2,1,0))  # swap channels from RGB to BGR
net.blobs['data'].reshape(50,3,26, 22)  # image size is 26x22

image = caffe.io.load_image(caffe_root + 'Pic/1001-2-t.jpg')
transformed_image = transformer.preprocess('data', image)
net.blobs['data'].data[...] = transformed_image

### perform classification
output = net.forward()

output_prob = output['prob'][0]  # the output probability vector for the first image in the batch

print 'predicted class is:', output_prob.argmax()

labels_file = caffe_root + 'Synset/synset_words.txt'
labels = np.loadtxt(labels_file, str, delimiter='\n')
print 'output label:', labels[output_prob.argmax()]

# sort top five predictions from softmax output
top_inds = output_prob.argsort()[::-1][:5]  # reverse sort and take five largest items

print 'probabilities and labels:',zip(output_prob[top_inds], labels[top_inds])

for layer_name, blob in net.blobs.iteritems():
    print layer_name + '\t' + str(blob.data.shape)

for layer_name, param in net.params.iteritems():
    print layer_name + '\t' + str(param[0].data.shape), str(param[1].data.shape)


plt.show()





