"""A very simple MNIST classifer.

See extensive documentation at ??????? (insert public URL)
"""

import tensorflow as tf
import cv2
import time

# Global training set, filled in by init_data():
#   x_data — flattened grayscale images (length 1764, i.e. 42x42 pixels — TODO confirm source image size)
#   y_data — matching one-hot label vectors of length 10
x_data = []
y_data = []


def init_data():
    """Populate the module-level x_data / y_data lists with training samples.

    For each digit 0-9, loads one image from each of the two source
    directories (``lunci1`` and ``dianshu1``) as grayscale, flattens it,
    and appends a matching one-hot label vector.

    Raises:
        FileNotFoundError: if an image file is missing or unreadable
            (cv2.imread silently returns None in that case, which would
            otherwise surface later as a confusing AttributeError).
    """
    def _append_sample(path, digit):
        # cv2.imread with flag 0 loads the image as single-channel grayscale.
        img = cv2.imread(path, 0)
        if img is None:
            # imread does NOT raise on failure — fail loudly here instead.
            raise FileNotFoundError("could not read image: {0}".format(path))
        x_data.append(img.reshape(-1))
        one_hot = [0] * 10
        one_hot[digit] = 1
        y_data.append(one_hot)

    for i in range(10):
        _append_sample("./origin_data/lunci1/{0}.png".format(i), i)
        _append_sample("./origin_data/dianshu1/{0}.png".format(i), i)


def train():
    """Train a single-layer softmax classifier on x_data/y_data and save it.

    Builds a linear model y = log_softmax(x W + b) over 1764-dim flattened
    images and 10 classes, trains with plain gradient descent, saves a
    checkpoint to ./model/, and prints the (training-set) accuracy.

    Side effects: writes checkpoint files under ./model/ and prints to stdout.
    Assumes init_data() has already been called.
    """
    # Create the model. The 'x' name and the 'pred_network' collection are
    # how ceshi() finds these tensors after restoring the graph.
    x = tf.placeholder("float", [None, 1764], name='x')
    W = tf.Variable(tf.zeros([1764, 10]))
    b = tf.Variable(tf.zeros([10]))
    y = tf.nn.log_softmax(tf.matmul(x, W) + b)
    tf.add_to_collection('pred_network', y)

    # Define loss and optimizer.
    y_ = tf.placeholder("float", [None, 10])
    # y is ALREADY log-probabilities (log_softmax), so cross-entropy is
    # -sum(y_ * y) directly; wrapping y in tf.log again would be wrong.
    cross_entropy = -tf.reduce_sum(y_ * y)
    train_step = tf.train.GradientDescentOptimizer(0.3).minimize(cross_entropy)

    # NOTE: initialize_all_variables is the deprecated TF1 spelling of
    # global_variables_initializer; kept for compatibility with the rest
    # of this TF1-era script.
    init = tf.initialize_all_variables()
    # Use a context manager so the session is always closed (the original
    # leaked it).
    with tf.Session() as sess:
        sess.run(init)
        saver = tf.train.Saver()
        for i in range(20):
            sess.run(train_step, feed_dict={x: x_data, y_: y_data})

        saver_path = saver.save(sess, "./model/model.ckpt", global_step=40)
        print("model saved in file: ", saver_path)

        # Test trained model (on the training data itself — there is no
        # separate test set here).
        correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))

        print(sess.run(accuracy, feed_dict={x: x_data, y_: y_data}))


def ceshi():
    """Restore the saved checkpoint and predict a label for each sample.

    Loads the graph from ./model/model.ckpt-40.meta, looks up the input
    placeholder 'x' and the prediction tensor stored in the 'pred_network'
    collection by train(), then prints the predicted digit and elapsed
    time for each of the 20 samples in x_data.

    Assumes init_data() and train() have already been run.
    """
    with tf.Session() as sess1:
        new_saver = tf.train.import_meta_graph('./model/model.ckpt-40.meta')
        new_saver.restore(sess1, "./model/model.ckpt-40")
        graph = tf.get_default_graph()
        x = graph.get_operation_by_name('x').outputs[0]
        y = tf.get_collection("pred_network")[0]
        # Build the argmax op ONCE. The original created a new tf.argmax
        # node on every iteration, growing the graph each time.
        prediction = tf.argmax(y, 1)
        for i in range(20):
            # time.clock() was removed in Python 3.8; perf_counter is the
            # documented replacement for wall-clock interval timing.
            start = time.perf_counter()
            result = sess1.run(prediction, feed_dict={x: [x_data[i]]})
            # Samples alternate lunci/dianshu per digit, so i // 2 is the
            # expected digit (the original's Python-2 `i/2` prints floats
            # on Python 3).
            print(i // 2, "预测值是:", result)
            print("耗时", time.perf_counter() - start)

