import pickle
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, activations, losses, optimizers, metrics, callbacks
import numpy as np
import os

# Path to the pickled bottleneck features produced by the feature-extraction
# script (step 1 of 3). Raw string because the path uses Windows separators.
FEATURE_PICKLE_PATH = r'_save\trans_learn_tf1x_on_tf2x_1of3_extract_feature.py\v7.0\bottleneck.pickle'


def sep(label = '', cnt=32):
    """Print a section separator: *label* flanked by two runs of `cnt` dashes."""
    bar = '-' * cnt
    print(f'{bar}{label}{bar}')


# Fix RNG seeds so weight initialization and any shuffling are reproducible.
tf.random.set_seed(1)
np.random.seed(1)

VER = 'v3.2'
ALPHA = 0.001  # learning rate
BATCH_SIZE = 64
N_EPOCHS = 8
FILE_NAME = os.path.basename(__file__)  # script name, used to namespace save/log dirs
SAVE_DIR = os.path.join('_save', FILE_NAME, VER)
SAVE_PATH = os.path.join(SAVE_DIR, 'trans_learn_self_model.dat')  # trained-model file
LOG_DIR = os.path.join('_log', FILE_NAME, VER)  # TensorBoard log directory

sep('Load features')
# NOTE(review): pickle.load can execute arbitrary code on untrusted input;
# acceptable here only because the pickle is produced by our own step-1 script.
with open(FEATURE_PICKLE_PATH, 'rb') as f:  # 'rb' is the conventional binary-read mode
    pickle_data = pickle.load(f)

idx2label = pickle_data['idx2label']
label2idx = pickle_data['label2idx']
print(idx2label)
print(label2idx)
N_CLS = len(idx2label)  # number of target classes


def _load_split(split):
    """Return (features, labels) for *split* ('train', 'test' or 'val').

    Features are reshaped to (N, 2048) — the bottleneck width the classifier
    head expects — and labels to (N, 1) for sparse categorical loss.
    Prints both shapes for a quick sanity check.
    """
    x = pickle_data[f'x_{split}'].reshape(-1, 2048)
    y = pickle_data[f'y_{split}'].reshape(-1, 1)
    print(f'x_{split}', x.shape)
    print(f'y_{split}', y.shape)
    return x, y


x_train, y_train = _load_split('train')
x_test, y_test = _load_split('test')
x_val, y_val = _load_split('val')

sep('Load or train and save model')
if os.path.exists(SAVE_PATH):
    print('Loading model ...')
    model = keras.models.load_model(SAVE_PATH)
    print('Loaded model')

    model.summary()
else:
    print('Making model')
    model = keras.Sequential([
        layers.Dense(128, activation=activations.relu, input_shape=[2048]),
        layers.Dense(N_CLS, activation=activations.softmax),
    ])
    model.summary()

    model.compile(
        loss=losses.SparseCategoricalCrossentropy(from_logits=False),
        optimizer=optimizers.Adam(learning_rate=ALPHA),
        metrics=metrics.sparse_categorical_accuracy
    )
    print('Traing ...')
    model.fit(x_train, y_train,
              BATCH_SIZE, N_EPOCHS,
              validation_data=(x_val, y_val),
              validation_batch_size=BATCH_SIZE,
              callbacks=callbacks.TensorBoard(log_dir=LOG_DIR, update_freq='batch', profile_batch=0))
    print('Training over.')
    keras.models.save_model(model, SAVE_PATH)

sep('Evaluate model')
# Report final loss/metric on the held-out test split.
model.evaluate(x_test, y_test, batch_size=BATCH_SIZE)
