# -*- coding: utf-8 -*-
#!/usr/bin/python3
"""
Author :      wu
Description :
"""

import tensorflow as tf
import numpy as np
import tensorflow.keras.backend as k
from tensorflow.keras import layers, models, metrics, losses, callbacks
import json

import optimizers

# LambdaCallback
# LambdaCallback: stream per-batch training metrics to a JSON-lines file.
# buffering=1 -> line-buffered, so each record hits disk as it is written.
json_log = open("./json_log.json", mode="wt", buffering=1)
json_logging = callbacks.LambdaCallback(
    # BUG FIX: on_batch_end receives the BATCH index, not the epoch; the
    # original named it `epoch` and logged it under the wrong key "epoch".
    on_batch_end=lambda batch, logs: json_log.write(
        json.dumps(dict(batch=batch, **logs)) + "\n")
    # Close (and thereby flush) the log file once training finishes.
    , on_train_end=lambda logs: json_log.close())


def scheduler(epoch):
    """Piecewise-constant learning-rate schedule.

    Starts from a base rate of 0.1 and applies a fixed decay factor once
    the epoch index passes each boundary (4, 8, 10, 12).

    Args:
        epoch: zero-based epoch index.

    Returns:
        The learning rate (float) to use for this epoch.
    """
    base_lr = 0.1
    # (boundary, decay factor) pairs, checked from the largest boundary down;
    # the first boundary the epoch exceeds determines the factor.
    decay_table = ((12, 0.5e-3), (10, 1e-3), (8, 1e-2), (4, 1e-1))
    factor = 1.0
    for boundary, decay in decay_table:
        if epoch > boundary:
            factor = decay
            break
    lr = base_lr * factor
    print('Learning rate: ', lr)
    return lr


#
class LearningRateScheduler(callbacks.Callback):
    """Keras callback that sets the optimizer learning rate each epoch.

    The ``schedule`` callable is invoked as ``schedule(epoch, lr)`` with
    the current learning rate; if that raises ``TypeError`` it falls back
    to the single-argument form ``schedule(epoch)``.
    """

    def __init__(self, schedule, verbose=0):
        """Create the callback.

        Args:
            schedule: callable ``(epoch[, lr]) -> new_lr`` returning a float.
            verbose: if > 0, print the new learning rate at each epoch start.
        """
        super(LearningRateScheduler, self).__init__()
        self.schedule = schedule
        self.verbose = verbose

    def on_epoch_begin(self, epoch, logs=None):
        """Compute the schedule's lr for this epoch and push it to the optimizer.

        Raises:
            ValueError: if the optimizer has no ``lr`` attribute, or the
                schedule returns something other than a float/tensor.
        """
        if not hasattr(self.model.optimizer, "lr"):
            raise ValueError("optimizer must have lr")
        try:
            # Preferred signature: schedule(epoch, current_lr).
            lr = float(k.get_value(self.model.optimizer.lr))
            lr = self.schedule(epoch, lr)
        except TypeError:
            # Fallback for schedules that only take the epoch index.
            lr = self.schedule(epoch)

        if not isinstance(lr, (tf.Tensor, float, np.float32, np.float64)):
            raise ValueError("the output of the schedule function should be float")
        k.set_value(self.model.optimizer.lr, k.get_value(lr))

        if self.verbose > 0:
            # BUG FIX: the original combined %-style placeholders (%05d, %s)
            # with str.format(), so the raw template was printed unfilled.
            # Use %-formatting so the placeholders are actually substituted.
            print("\nepoch %05d: learning rate schedule reducing learning rate to %s."
                  % (epoch + 1, lr))

    def on_epoch_end(self, epoch, logs=None):
        """Record the effective lr in ``logs`` so it shows up in History."""
        logs = logs or {}
        logs["lr"] = k.get_value(self.model.optimizer.lr)


def main():
    """Train the toy FakeModel with SGD, driving the lr via `scheduler`."""
    demo_model = optimizers.FakeModel(
        tf.constant(1.0), tf.constant(-2.0), tf.constant(1.0))
    demo_model.build()
    demo_model.summary()
    demo_model.compile(
        optimizer=tf.keras.optimizers.SGD(learning_rate=0.01),
        loss=losses.binary_crossentropy)
    # 100 all-zero samples, all-one labels; 2 per batch over 10 epochs.
    demo_model.fit(
        tf.zeros([100, 2]), tf.ones(100), batch_size=2, epochs=10,
        callbacks=[LearningRateScheduler(scheduler)])


if __name__ == "__main__":
    main()
