
import tensorflow as tf
import random
import struct

# debug leftover: smoke-test that the random module imported correctly
print random.random()


def read_lable(lable_file):
	with open(lable_file) as f:
		msb = f.read(4)
		items = f.read(4)
#		print items
		# big endian
		items = struct.unpack('>i', items)[0]
		print "lable: ", items
		lables = []
		for i in xrange(items):
			lable = f.read(1)
			lable = struct.unpack("B", lable)[0]
			lables.append(lable)
#			print "%d"%(lable)
#			break
		return lables


def read_image(image_file):
	with open(image_file) as f:
		msb = f.read(4)
		items = f.read(4)
		items = struct.unpack('>i', items)[0]
		rows = struct.unpack('>i', f.read(4))[0]
		columns = struct.unpack('>i', f.read(4))[0]
		print "file: %s, itmes: %d, row:%d, col:%d"%(image_file, items, rows, columns)
		images = []
		for i in xrange(items):
			# read row
			image = []
			for r in xrange(rows):
				row = []
				for c in xrange(columns):
					pix = struct.unpack('B', f.read(1))[0]
					row.append(pix)
				image.append(row)
#				image.append(f.read(columns))
			images.append(image)
#			break
		return images

from pylab import *

def show_img(img_arr, name):
	"""Save a 2-D pixel array as <name>.png via pylab's imsave."""
#	imshow(img_arr)
#	show()
	imsave(name + ".png", img_arr)	

def gen_org_image(rimage, lables):
	ofs = open("test.jpg", "w")
	#for image in rimage:
	for i in xrange(len(lables)):
		image = rimage[i]
		print lables[i]
		show_img(image)
		break

# load the raw MNIST training set (images + labels) at import time
rimage = read_image('train-images-idx3-ubyte')
lables = read_lable("train-labels-idx1-ubyte")

# sanity check: both counts should be 60000 for the standard training set
print "images: %d, lables: %d"%(len(rimage), len(lables))
#gen_org_image(rimage, lables)


def map_image(image):
	"""Add a trailing channel axis to a 2-D image.

	Turns a rows x cols nested list of pixels into a
	rows x cols x 1 nested list, matching the [28, 28, 1]
	placeholder shape used by the graph below.
	"""
	return [[[pix] for pix in row] for row in image]
def map_lable(lable):
	"""Return a length-10 one-hot vector with a 1 at index `lable`."""
	one_hot = [0] * 10
	one_hot[lable] = 1
	return one_hot

inx_map = {}
def random_inx(len_n):
	import random
#	if inx_map.has_key(len_n):
#		return inx_map[len_n]
#	inx_map[len_n] = 0
#	return 0
	r = random.random()
	r = int( r * 90000 % len_n)
	print r 
	return r
#return int(r)

print "test data parse"



# bundle the parsed training set as (images, labels) for batch_img
train_data = (rimage, lables)

print "test data parse finish"

def batch_img(data, n):
	"""Draw a mini-batch of n consecutive samples from a data set.

	data -- (images, lables) tuple as built from read_image/read_lable
	n    -- batch size

	Starts at a random index (wrapping back to 0 when it runs past the
	end) and returns (xs_img, ys_lable) where each image is reshaped to
	rows x cols x 1 by map_image and each label is one-hot encoded by
	map_lable.
	"""
	# cleanup vs. original: the unused img_name_p local and the dead
	# debugging branch that computed it have been removed.
	xs_img = []
	ys_lable = []
	_images = data[0]
	_lables = data[1]
	len_n = len(_images)
	i = random_inx(len_n)  # random starting position for this batch
	for j in xrange(n):
		if i >= len_n:
			i = 0  # wrap around instead of indexing past the end
		xs_img.append(map_image(_images[i]))
		ys_lable.append(map_lable(_lables[i]))
		i += 1
	return xs_img, ys_lable
	

print "tf start"
# softmax regression graph: flatten 28x28x1 input, single affine layer + softmax
#x = tf.placeholder("float", [None, 784])
x = tf.placeholder("float", [None, 28, 28, 1])
W = tf.Variable(tf.zeros([784, 10]))  # weights: 784 pixels -> 10 classes
b = tf.Variable(tf.zeros([10]))       # per-class bias

# flatten each image to a 784-vector for the matmul
XX = tf.reshape(x, [-1, 784])

#y = tf.nn.softmax(tf.matmul(x, W) + b)
y = tf.nn.softmax(tf.matmul(XX, W) + b)

# one-hot ground-truth labels
y_ = tf.placeholder("float", [None, 10])

# cross-entropy loss (summed over the batch, not averaged)
cross_entropy = -tf.reduce_sum(y_*tf.log(y))

train_step = tf.train.GradientDescentOptimizer(0.01).minimize(cross_entropy)

init = tf.initialize_all_variables()

print "start and init session"
sess = tf.Session()
sess.run(init)
def train_(n):
	#for i in range(1000):
	for i in range(n):
		print "train ", i
		batch_xs, batch_ys = batch_img(train_data, 100)
		sess.run(train_step, feed_dict={x: batch_xs, y_ : batch_ys})
		del batch_xs
		del batch_ys

# run 1000 training steps, then evaluate accuracy on the test set
train_(1000)
print "train finish"
# fraction of samples where the predicted class matches the label
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))

accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))

# free the training set before loading the test set to limit peak memory
del train_data
#del train_data[0]
#del train_data[1]
del rimage
del lables
#del train_data
test_images = read_image("t10k-images-idx3-ubyte")
test_lables = read_lable("t10k-labels-idx1-ubyte")
test_data = (test_images, test_lables)


# NOTE(review): only a 100-sample batch is evaluated, not the full
# 10k test set — accuracy printed below is a rough estimate
test_xs, test_ys = batch_img(test_data, 100)
print "ys: ", len(test_ys), len(test_ys[0])


#print accuracy.eval(feed_dict={x: test_xs, y_: test_ys})

#print test_ys
#print y
#print sess.run(tf.argmax(y, 1), feed_dict={x: test_xs, y_: test_ys})
print sess.run(accuracy, feed_dict={x: test_xs, y_: test_ys})

 

	
sess.close()
	

