from keras.optimizers import Adam, Adadelta, SGD
from keras.callbacks import Callback, ModelCheckpoint, LearningRateScheduler
from keras.backend.tensorflow_backend import set_session
import tensorflow as tf
from resnet3D import ResnetBuilder
from vgg13_shortcuts import vgg13_shortcuts_v2
import numpy as np
from train_config_all import get_config
import os
import global_config


def train_model(name, valid_folds, batchsize, crop_shape, shift_range, lr, nb_epoches, seed, weight_decay, gpus,
                lr_decay, candidate_file, candidate_vol_file, pretrain_model, pretrain_model_epoch, include_testset,
                prob_thresh, **kwargs):
    """Fine-tune a vgg13-shortcuts malignancy classifier from a pretrained checkpoint.

    Sets up the GPU/TF session, builds train/valid generators via ``get_config``,
    loads pretrained weights selected by ``pretrain_model_epoch``, compiles with
    Adam + sparse categorical cross-entropy, and runs ``fit_generator`` with a
    per-epoch exponential learning-rate decay. All hyperparameters are logged to
    ``args.log`` inside the run's model folder.

    Args:
        name: run name; checkpoints and the args log are written under
            ``global_config.model_folder/name``.
        valid_folds, batchsize, crop_shape, shift_range, seed, candidate_file,
        candidate_vol_file, include_testset, prob_thresh: forwarded to
            ``get_config`` to build the data generators.
        lr, lr_decay: initial learning rate and per-epoch exponential decay factor.
        nb_epoches: number of training epochs.
        weight_decay: L2 regularization passed to the model builder.
        gpus: value for ``CUDA_VISIBLE_DEVICES``.
        pretrain_model, pretrain_model_epoch: folder (under
            ``global_config.model_folder``) and filename prefix that select the
            pretrained weight file to load (``by_name=True``).
        **kwargs: extra settings; merged into the logged argument dict.

    Raises:
        FileNotFoundError: no weight file in the pretrain folder matches
            ``pretrain_model_epoch``.
        ValueError: more than one weight file matches (ambiguous choice).
    """
    # Capture all arguments first (before any other locals exist), then flatten
    # **kwargs into the dict so every setting is printed and logged uniformly.
    args = locals()
    args.update(args.pop('kwargs'))

    # Pin GPUs and avoid TF grabbing all GPU memory up front.
    os.environ["CUDA_VISIBLE_DEVICES"] = gpus
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    set_session(tf.Session(config=config))
    tf.set_random_seed(seed)

    # Echo the full configuration to stdout for the run log.
    print('')
    print('*' * 20)
    for k in sorted(args.keys()):
        print('{k}: {v}'.format(k=k, v=args[k]))
    print('*' * 20)
    print('')

    config_dict = get_config(valid_folds, batchsize, crop_shape, shift_range, seed, candidate_file, candidate_vol_file,
                             include_testset, prob_thresh)
    train_generator = config_dict['train_generator']
    nb_train_samples = config_dict['nb_train_samples']
    valid_generator = config_dict['valid_generator']
    nb_valid_samples = config_dict['nb_valid_samples']

    np.random.seed(seed)

    kwargs['input_shape'] = config_dict['input_shape']
    # Single binary head predicting malignancy ('Mal_2').
    num_classes = (2,)
    feature_names = ('Mal_2',)
    model = vgg13_shortcuts_v2((64, 64, 64, 1), 32, weight_decay, num_classes, feature_names)

    # Select exactly one pretrained weight file by its epoch prefix. The
    # original code only printed a warning here and then crashed later with an
    # opaque TypeError (os.path.join on a list); fail fast and clearly instead.
    pretrain_model_path = os.path.join(global_config.model_folder, pretrain_model)
    pretrain_model_weights = os.listdir(pretrain_model_path)
    pretrain_model_weight = [x for x in pretrain_model_weights if x.startswith(pretrain_model_epoch)]
    if len(pretrain_model_weight) == 0:
        raise FileNotFoundError(
            'no model weights! (no file starting with {e!r} in {p})'.format(
                e=pretrain_model_epoch, p=pretrain_model_path))
    elif len(pretrain_model_weight) > 1:
        raise ValueError(
            'too many weights! ({n} files start with {e!r} in {p})'.format(
                n=len(pretrain_model_weight), e=pretrain_model_epoch, p=pretrain_model_path))
    else:
        pretrain_model_weight = pretrain_model_weight[0]
    # by_name=True allows partial loading when layer sets differ.
    model.load_weights(os.path.join(pretrain_model_path, pretrain_model_weight), by_name=True)
    model.compile(
        optimizer=Adam(lr=lr),
        loss=['sparse_categorical_crossentropy'],
        metrics=['sparse_categorical_accuracy']
    )

    # Persist the full argument dict next to the checkpoints for reproducibility.
    os.makedirs(os.path.join(global_config.model_folder, name), exist_ok=True)
    with open(os.path.join(global_config.model_folder, name, 'args.log'), 'w') as out:
        for k in sorted(args.keys()):
            out.write('{k}: {v}\n'.format(k=k, v=args[k]))
    model_saveto = os.path.join(global_config.model_folder, name,
                                '''epoch-{epoch:04d}-loss:{loss:.4f}-val_loss:{val_loss:.4f}-acc:{val_sparse_categorical_accuracy:.4f}.hdf5''')

    def schedule(epoch, decay=lr_decay):
        # Exponential decay: lr * decay^epoch (epoch is 0-based in Keras 1.x).
        return lr * decay ** epoch

    try:
        model.fit_generator(
            generator=train_generator,
            validation_data=valid_generator,
            nb_val_samples=nb_valid_samples,
            samples_per_epoch=batchsize * 500,
            nb_epoch=nb_epoches,
            verbose=1,
            callbacks=[
                ModelCheckpoint(model_saveto),
                LearningRateScheduler(schedule)
            ],
        )
    except KeyboardInterrupt:
        # Allow manual interruption without a stack trace; checkpoints saved so far remain.
        pass
