import pickle
import tensorflow as tf
import numpy as np
import os
import sys

# Windows-style path to the pickled bottleneck features written by the
# feature-extraction script (step 1 of 3); raw string preserves backslashes.
FEATURE_PICKLE_PATH = r'_save\trans_learn_tf1x_on_tf2x_1of3_extract_feature.py\v7.0\bottleneck.pickle'


def sep(label='', cnt=32):
    """Print *label* framed by *cnt* dashes on each side, as a section divider."""
    dashes = '-' * cnt
    print(f'{dashes}{label}{dashes}')


# Seed both TensorFlow's graph-level RNG and NumPy for reproducible runs.
tf.random.set_random_seed(1)
np.random.seed(1)

VER = 'v5.2'
ALPHA = 0.001  # learning rate
BATCH_SIZE = 32
N_EPOCHS = 20
# Checkpoints are written under _save/<this script's filename>/<VER>/,
# with filenames of the form "epoch-<global_step>".
FILE_NAME = os.path.basename(__file__)
SAVE_DIR = os.path.join('_save', FILE_NAME, VER)
SAVE_PREFIX = os.path.join(SAVE_DIR, 'epoch')
os.makedirs(SAVE_DIR, exist_ok=True)
# Refuse to overwrite an existing run of this version: if the save dir
# already has files, bail out and ask the user to bump VER.
if len(os.listdir(SAVE_DIR)) > 0:
    print(f'Ver {VER} is already trained. Add version number to start a new training.')
    sys.exit(0)

sep('Load features')
# NOTE(review): pickle.load can execute arbitrary code from an untrusted
# file; acceptable here since the pickle is produced by our own
# feature-extraction script.
with open(FEATURE_PICKLE_PATH, 'br') as f:
    pickle_data = pickle.load(f)

# Label <-> index mappings saved by the feature-extraction step.
idx2label = pickle_data['idx2label']
label2idx = pickle_data['label2idx']
print(idx2label)
print(label2idx)
N_CLS = len(idx2label)  # number of classes; len of the mapping directly

# Flatten bottleneck features to (n_samples, 2048) vectors for the
# dense classifier head. Labels are integer class indices.
x_train = pickle_data['x_train'].reshape(-1, 2048)
y_train = pickle_data['y_train']
print('x_train', x_train.shape)
print('y_train', y_train.shape)

x_test = pickle_data['x_test'].reshape(-1, 2048)
y_test = pickle_data['y_test']
print('x_test', x_test.shape)
print('y_test', y_test.shape)

x_val = pickle_data['x_val'].reshape(-1, 2048)
y_val = pickle_data['y_val']
print('x_val', x_val.shape)
print('y_val', y_val.shape)


def data_loader(x, y, batch_size=None):
    """Yield aligned (x, y) mini-batches.

    Args:
        x: indexable sequence/array of features.
        y: indexable sequence/array of labels, aligned with ``x``.
        batch_size: samples per batch; defaults to the module-level
            BATCH_SIZE for backward compatibility with existing callers.

    Yields:
        Tuples ``(x_batch, y_batch)`` of length ``batch_size``.

    Note:
        Only full batches are yielded; the trailing ``len(x) % batch_size``
        samples are dropped, matching the original behavior.
    """
    if batch_size is None:
        batch_size = BATCH_SIZE
    n_batches = len(x) // batch_size  # floor division: full batches only
    for i in range(n_batches):
        lo = i * batch_size
        yield x[lo:lo + batch_size], y[lo:lo + batch_size]


sep('model')
# TF 1.x-style static graph. Placeholders carry 2048-dim bottleneck
# features and integer class labels; they are named so they can be
# re-fetched by name after a checkpoint restore.
x = tf.placeholder(tf.float32, [None, 2048], name='ph_x')
y = tf.placeholder(tf.int32, [None], name='ph_y')

# Linear classifier head: a single dense layer mapping features to
# per-class logits (softmax is applied inside the loss op below).
h = tf.layers.Dense(N_CLS)(x)

# Mean softmax cross-entropy over the batch. Labels are class indices,
# so the sparse variant is used (no one-hot encoding needed).
loss = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=h)
)

optim = tf.train.AdamOptimizer(learning_rate=ALPHA).minimize(loss)

# Accuracy: fraction of samples whose argmax logit equals the label.
acc = tf.reduce_mean(
    tf.cast(
        tf.equal(
            tf.cast(y, dtype=tf.int32),
            tf.cast(tf.argmax(h, axis=1), dtype=tf.int32)
        ),
        tf.float32
    )
)

with tf.Session() as sess:
    # Initialize all graph variables before any sess.run of ops/tensors.
    sess.run(tf.global_variables_initializer())

    # Keep at most the 6 most recent checkpoints.
    saver = tf.train.Saver(tf.global_variables(), max_to_keep=6)

    print('Training...')
    for epoch in range(N_EPOCHS):
        print(f'epoch # {epoch + 1}')
        # Running sums for per-epoch averages over mini-batches.
        avg_loss = 0.
        avg_acc = 0.
        cnt = 0
        for bx, by in data_loader(x_train, y_train):
            cnt += 1
            # One optimization step; loss/acc are fetched from the same run
            # so they reflect the pre-update weights for this batch.
            loss_v, _, acc_v = sess.run([loss, optim, acc], feed_dict={x: bx, y: by})
            # print(f'epoch#{epoch + 1}: #{cnt} loss = {loss_v}, acc = {acc_v}')
            avg_loss += loss_v
            avg_acc += acc_v
        avg_loss /= cnt
        avg_acc /= cnt
        print(f'epoch#{epoch + 1}, avg loss = {avg_loss}, avg acc = {avg_acc}')

        # Checkpoint every epoch; global_step=epoch makes files "epoch-<n>".
        print('Saving...')
        file = saver.save(sess, SAVE_PREFIX, global_step=epoch)
        print(f'Saved to {file}')

        # Validation pass: same metrics, but no optim op, so no updates.
        avg_loss = 0.
        avg_acc = 0.
        cnt = 0
        for bx, by in data_loader(x_val, y_val):
            cnt += 1
            loss_v, acc_v = sess.run([loss, acc], feed_dict={x: bx, y: by})
            avg_loss += loss_v
            avg_acc += acc_v
        avg_loss /= cnt
        avg_acc /= cnt
        print(f'epoch#{epoch + 1}, val avg loss = {avg_loss}, val avg acc = {avg_acc}')

    # Final evaluation on the held-out test split (no weight updates).
    print('Testing ...')
    avg_loss = 0.
    avg_acc = 0.
    cnt = 0
    for bx, by in data_loader(x_test, y_test):
        cnt += 1
        loss_v, acc_v = sess.run([loss, acc], feed_dict={x: bx, y: by})
        avg_loss += loss_v
        avg_acc += acc_v
    avg_loss /= cnt
    avg_acc /= cnt
    print(f'test avg loss = {avg_loss}, test avg acc = {avg_acc}')
