
import os,logging
import tensorflow as tf


def print_num_of_total_parameters(output_detail=False, output_to_logging=False):
    """Count and report the trainable parameters in the default TF graph.

    Args:
        output_detail: if True, also emit a per-variable breakdown
            ("name count, " for rank-1 variables, "name shape=count, "
            otherwise).
        output_to_logging: if True, report via logging.info; otherwise print.

    Returns:
        None; the report is emitted as a side effect.

    Raises:
        ValueError: if any trainable variable has an unknown (None) dimension,
            which would make the parameter count meaningless.
    """
    trainable = tf.trainable_variables()  # fetch the collection once
    total_parameters = 0
    parts = []  # per-variable strings; joined once (avoids quadratic +=)

    for variable in trainable:
        shape = variable.get_shape()
        variable_parameters = 1
        for dim in shape:
            # TF 1.x shape iteration yields Dimension objects; newer compat
            # APIs may yield plain ints.  Handle both, and fail loudly on
            # unknown dimensions instead of a bare TypeError.
            size = dim.value if hasattr(dim, 'value') else dim
            if size is None:
                raise ValueError(
                    "variable %s has an unknown dimension in shape %s"
                    % (variable.name, shape))
            variable_parameters *= size
        total_parameters += variable_parameters
        # The original code had identical branches for rank 0 and rank >= 2;
        # only rank-1 variables use the short "name count" form.
        if len(shape) == 1:
            parts.append("%s %d, " % (variable.name, variable_parameters))
        else:
            parts.append("%s %s=%d, " % (variable.name, str(shape),
                                         variable_parameters))

    parameters_string = "".join(parts)
    summary = "Total %d variables, %s params" % (
        len(trainable), "{:,}".format(total_parameters))

    if output_to_logging:
        if output_detail:
            logging.info(parameters_string)
        logging.info(summary)
    else:
        if output_detail:
            print(parameters_string)
        print(summary)


# --- Graph construction -----------------------------------------------------
# Directory where checkpoints are read from and written to.
train_path = './train'

# Start from a clean slate, then assemble all ops inside a dedicated graph.
tf.reset_default_graph()
restore_graph = tf.Graph()

with restore_graph.as_default():
    # Scalar feed; each step its value is copied into the variable below.
    x = tf.placeholder(dtype=tf.float32, shape=[], name='input')
    y = tf.Variable(initial_value=0, name="y_variable", dtype=tf.float32)
    # Running this op assigns the current fed value of `x` to `y`.
    update_y = tf.assign(y, x)
    # Keep at most the three most recent checkpoints on disk.
    saver = tf.train.Saver(max_to_keep=3)
    init_op = tf.global_variables_initializer()

# Locate the latest checkpoint under train_path and recover the global step
# encoded in its filename (convention: ".../module-<global_step>").
ckpt = tf.train.get_checkpoint_state(train_path)
if ckpt is None or not ckpt.model_checkpoint_path:
    # get_checkpoint_state returns None when no checkpoint file exists;
    # fail with a clear message instead of an AttributeError below.
    raise FileNotFoundError("no checkpoint found in {}".format(train_path))
print("loading {}".format(ckpt.model_checkpoint_path))

# os.path.basename instead of split('/') so Windows-style paths work too.
restore_step = int(os.path.basename(ckpt.model_checkpoint_path).split('-')[-1])


# Restore the latest checkpoint, report the restored variables, then resume
# the step loop, checkpointing every 4000 steps.
with tf.Session(graph=restore_graph) as restore_sess:
    # Rebuild a Saver (and its restore ops) from the checkpoint's .meta file.
    # NOTE(review): import_meta_graph imports into the *default* graph; the
    # Session context sets a default session, not a default graph, so this
    # may target the fresh post-reset graph rather than restore_graph —
    # confirm the restore below actually runs against restore_graph.
    restore_saver = tf.train.import_meta_graph(ckpt.model_checkpoint_path + '.meta')
    restore_saver.restore(restore_sess,tf.train.latest_checkpoint(train_path))
    # print(restore_sess.run("y_variable:0"))
    # Report what was restored.  NOTE(review): tf.trainable_variables() reads
    # the default graph's collection — verify it lists the restored variables.
    variable_names = [v.name for v in tf.trainable_variables()]
    print( " restore finished has {} variables".format(len(variable_names)) )
    for var in variable_names:
        print(var+" {},".format(restore_sess.run(var)))

    # Resume from the recovered step: each iteration copies the step index
    # into y; every 4000 steps a checkpoint named module-<step> is written
    # (Saver keeps at most 3).
    for step in range(restore_step,40000):
        y_result = restore_sess.run(update_y,feed_dict={x:step})
        if step %4000 == 0:
            restore_saver.save(restore_sess,os.path.join(train_path,'module'),global_step=step)
            # print_num_of_total_parameters(output_detail=True)