#_*_coding:utf-8_*_
import tensorflow as tf
# from tensorflow.examples.tutorials.mnist import input_data
import tensorflow.contrib.slim as slim

import torch

from econet import ECONet
# from econetv1 import ECONet

import numpy as np


def test_graph():
    """Build the ECONet graph once and dump it to 'logs/' for TensorBoard.

    Side effects: creates TF graph nodes, runs a Session, and writes a
    graph event file under 'logs/'. Returns nothing.
    """
    # Hyperparameters
    opt = {
        'weight_decay': 0.0,
        'net2d_keep_prob': 0.5,
        'net3d_keep_prob': 0.5,
        'num_segments': 3,
        'num_classes': 400
        }

    # Input data
    with tf.name_scope('input'):
        x = tf.placeholder(tf.float32, [None, 224, 224, 3], name = 'x_input')
        # NOTE(review): `y` is unused here but kept so the exported graph
        # matches the other utilities in this file.
        y = tf.placeholder(tf.float32, [None, 10], name = 'y_input')

    logits, _ = ECONet(x, opt=opt)

    # Initialize
    init = tf.global_variables_initializer()

    with tf.Session() as sess:
        sess.run(init)
        # Write the graph definition. Close the writer explicitly so the
        # event file is flushed to disk (previously it was leaked).
        writer = tf.summary.FileWriter('logs/', sess.graph)
        writer.close()

        print('Done')


def calc_param():
    """Build ECONet and print each trainable variable's name and element
    count, followed by the variable count and the grand total.

    Returns nothing; output goes to stdout.
    """
    # Hyperparameters
    opt = {
        'weight_decay': 0.0,
        'net2d_keep_prob': 0.5,
        'net3d_keep_prob': 0.5,
        'num_segments': 3,
        'num_classes': 400 
    }

    # Input data
    with tf.name_scope('input'):
        x = tf.placeholder(tf.float32, [None, 224, 224, 3], name = 'x_input')
        y = tf.placeholder(tf.float32, [None, 10], name = 'y_input')

    logits, _ = ECONet(x, opt=opt)

    total_parameters = 0

    # (The two comments below were previously swapped.)
    # vars_list = tf.global_variables()  # every variable in the graph, trainable or not.
    vars_list = tf.trainable_variables()  # only the trainable variables.

    for variable in vars_list:
        # Element count = product of the variable's static shape dimensions.
        # Variables always have fully-defined static shapes, so dim.value is
        # never None here.
        variable_parameters = 1
        for dim in variable.get_shape():
            variable_parameters *= dim.value
        print("{:>70} : {}".format(variable.name, variable_parameters))
        total_parameters += variable_parameters

    print(len(vars_list))
    print("Total number of trainable parameters: %d" % (total_parameters))


def save_param(moving_variables=None):
    """Build ECONet, zero out the first trainable variable, and save a
    checkpoint to 'experiments/test_model/test.ckpt'.

    Args:
        moving_variables: unused. It now defaults to None so the no-arg
            call in __main__ (``save_param()``) works; previously it was a
            required parameter and that call would raise TypeError.
            TODO(review): remove once confirmed no caller passes it.
    """
    # Hyperparameters
    opt = {
        'weight_decay': 0.0, 
        'net2d_keep_prob': 0.5,
        'net3d_keep_prob': 0.5,
        'num_segments': 3,
        'num_classes': 400 
    }

    # Input data
    with tf.name_scope('input'):
        x = tf.placeholder(tf.float32, [None, 224, 224, 3], name = 'x_input')
        y = tf.placeholder(tf.float32, [None, 10], name = 'y_input')

    logits, _ = ECONet(x, opt=opt)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())

        # Overwrite the first trainable variable (expected to be a 7x7x3x64
        # conv kernel) with zeros so the checkpoint is easy to recognize
        # when reloaded by load_param().
        a = np.zeros((7, 7, 3, 64))
        for i, variable in enumerate(tf.trainable_variables()):
            if i < 1:
                assert a.shape == variable.shape, 'Assigned variable shape not equals to original variable shape'
                print(variable.shape)
                sess.run(variable.assign(a))
                print(variable.eval())
                break

        saver = tf.train.Saver()
        saver.save(sess, 'experiments/test_model/test.ckpt')


def load_param():
    """Rebuild ECONet, restore the test checkpoint, and print the first
    trainable variable so the restored values can be eyeballed.

    Counterpart to save_param(): after a save, the printed tensor should
    be all zeros.
    """
    # Hyperparameters
    opt = {
        'weight_decay': 0.0,
        'net2d_keep_prob': 0.5,
        'net3d_keep_prob': 0.5,
        'num_segments': 3,
        'num_classes': 400
    }

    # Input data
    with tf.name_scope('input'):
        x = tf.placeholder(tf.float32, [None, 224, 224, 3], name = 'x_input')
        y = tf.placeholder(tf.float32, [None, 10], name = 'y_input')

    logits, _ = ECONet(x, opt=opt)

    with tf.Session() as sess:
        saver = tf.train.Saver()
        # Initialize first, then let the restore overwrite the values.
        sess.run(tf.global_variables_initializer())
        saver.restore(sess, 'experiments/test_model/test.ckpt')

        # Inspect only the very first trainable variable.
        trainables = tf.trainable_variables()
        if trainables:
            first = trainables[0]
            print(first.shape)
            print(first.eval())


def print_param():
    """Build ECONet and print every global variable's name, the variable
    count, and the total number of parameters.

    Returns nothing; output goes to stdout.
    """
    # NOTE(review): `path` is never used — looks like a leftover from a
    # planned checkpoint inspection; kept for reference.
    path = 'experiments/201019-165755/ckpt/best.ckpt'

    opt = {
        'weight_decay': 0.0, 
        'net2d_keep_prob': 0.5,
        'net3d_keep_prob': 0.5,
        'num_segments': 3,
        'num_classes': 400 
    }

    # Input data
    with tf.name_scope('input'):
        x = tf.placeholder(tf.float32, [None, 224, 224, 3], name = 'x_input')
        y = tf.placeholder(tf.float32, [None, 10], name = 'y_input')

    logits, _ = ECONet(x, opt=opt)

    total_parameters = 0

    vars_list = tf.global_variables()  # every variable in the graph, trainable or not.
    # vars_list = tf.trainable_variables()  # only the trainable variables.

    for variable in vars_list:
        print("{}".format(variable.name))
        # Accumulate the element count; previously nothing was added and the
        # final line always printed 0.
        variable_parameters = 1
        for dim in variable.get_shape():
            variable_parameters *= dim.value
        total_parameters += variable_parameters

    print(len(vars_list))
    # vars_list holds *global* variables here, so the message no longer
    # claims "trainable".
    print("Total number of parameters: %d" % (total_parameters))


if __name__ == '__main__':
    # Manual test harness: uncomment exactly one utility per run.
    # (Each one builds its own copy of the ECONet graph, so running two in
    # the same process would duplicate variables.)
    # test_graph()
    # calc_param()
    print_param()
    # save_param()
    # load_param()

