# -*- coding: utf-8 -*-

import numpy as np
import tensorflow as tf
import cv2

import loss
import yolt
import load

data_path = '../process/'               # labels and images live under this same directory
lable_path = data_path + 'label.txt'    # label file  (NOTE(review): "lable" is a typo for "label"; kept because later lines reference this name)
image_size = 416                        # input image side length (square, 3-channel)
cells = 26*26                           # number of grid cells in the detection grid
# cell_vector_size = 5                    # per-cell vector size (previous setting)
cell_vector_size = 1                    # per-cell vector size
saver_step = 1                          # checkpoint interval: save parameters every N rounds

round_size = 10                         # number of training rounds per run
begin_index = 10                        # NOTE(review): defined but never used in this file — confirm before removing

label_size = cells * cell_vector_size   # total length of the flattened label/output vector

# ---- TF1.x static-graph construction -------------------------------------
# Input batch of RGB images, NHWC layout, variable batch size.
input_tensor = tf.placeholder(tf.float32, (None, image_size, image_size, 3))
# Ground-truth labels, one flat vector of length label_size per image.
_lable = tf.placeholder(tf.float32, (None, label_size))

# Network forward pass (project-local model; contract defined in yolt.py).
output = yolt.yolt(input_tensor, cell_vector_size)
# NOTE(review): this rebinds the name `loss` from the imported module to the
# loss *tensor*, shadowing the module for the rest of the file. Works, but
# fragile if loss.loss() were ever needed again below.
loss = loss.loss(output, _lable)

# NOTE(review): lr=0.1 is unusually high for Adam (default 1e-3) — confirm
# this is intentional and not a leftover from experimentation.
optimizer = tf.train.AdamOptimizer(0.1)
train = optimizer.minimize(loss)

# Keep only the single most recent checkpoint on disk.
saver=tf.train.Saver(max_to_keep=1)
count = 0  # global step counter used for printing and checkpoint numbering
with tf.Session() as sess:

    # load(path, c): training set holds c elements.
    data = load.load(lable_path, round_size)

    # Locate the newest checkpoint; this script deliberately refuses to start
    # from random weights (must resume from a previously saved model).
    model_file = tf.train.latest_checkpoint(data_path)
    if model_file:
        saver.restore(sess, model_file)
    else:
        print('No model!')           # fail loudly rather than train from scratch
        exit(-1)

    for i in range(round_size):
        # data.load(b) fetches b images per round; larger batches (e.g. 30)
        # are suggested, but too large a value overflows memory.
        image, label = data.load(1)
        image = np.reshape(image, [-1, image_size, image_size, 3]).astype(np.float32)
        label = np.reshape(label, [-1, label_size]).astype(np.float32)

        # BUG FIX: the original only evaluated `output` and `loss` and never
        # ran the `train` op, so the Adam update was never applied and the
        # network never learned.  Running `train` in the same sess.run call
        # performs exactly one optimization step per round.
        _, _output, _loss = sess.run(
            [train, output, loss],
            feed_dict={input_tensor: image, _lable: label})

        # Show the loss value each round; the timing cost is negligible.
        count += 1
        print(count, ': ', _loss)

        # BUG FIX: `saver_step` and the Saver existed but saver.save() was
        # never called, so no checkpoint was ever written.  Save every
        # `saver_step` rounds; max_to_keep=1 retains only the newest file,
        # and latest_checkpoint(data_path) above will find it on restart.
        if count % saver_step == 0:
            saver.save(sess, data_path + 'model', global_step=count)

    # No explicit sess.close(): the `with` block already closes the session.
