#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Author :      wu
Description : Minimize the quadratic y = x^2 - 2x + 1 with TensorFlow 2 in
              three ways: a manual GradientTape loop, optimizer.minimize
              inside a tf.function, and a Keras model trained via model.fit.
"""

import tensorflow as tf


# Module-level state shared by the routines below: the scalar variable being
# optimized and the SGD optimizer that updates it in place.
x = tf.Variable(0, dtype=tf.float32, name="x")
optimizer = tf.keras.optimizers.SGD(learning_rate=0.01)


@tf.function
def minimizer():
    """Minimize y = a*x^2 + b*x + c by gradient descent on the module-level
    variable ``x`` using the module-level SGD ``optimizer``.

    Iterates until the gradient (nearly) vanishes or the optimizer has taken
    10 steps, logging x after every update.

    Returns:
        Scalar float32 tensor: the loss evaluated at the final ``x``.
    """
    a = tf.constant(1.0, dtype=tf.float32)
    b = tf.constant(-2.0, dtype=tf.float32)
    c = tf.constant(1.0, dtype=tf.float32)

    # tf.constant(True) (not the Python literal) keeps the loop a graph-mode
    # tf.while_loop under autograph.
    while tf.constant(True):
        with tf.GradientTape() as tape:
            y = a * tf.pow(x, 2) + b * x + c
        dy_dx = tape.gradient(y, x)

        optimizer.apply_gradients(grads_and_vars=[(dy_dx, x)])

        # FIX: the original guard `tf.math.mod(optimizer.iterations, 1) == 0`
        # is always true (n mod 1 == 0), so it logged every step anyway;
        # print unconditionally instead of keeping the dead condition.
        tf.print(tf.strings.format("step = {}, x= {}", (optimizer.iterations, x)))

        # Stop when converged (tiny gradient) or after 10 optimizer steps.
        if tf.abs(dy_dx) < tf.constant(0.00001) or tf.greater_equal(
                optimizer.iterations, tf.constant(10, dtype=tf.int64)):
            break

    # Recompute the loss at the final x.
    y = a * tf.pow(x, 2) + b * x + c

    return y


def func():
    """Evaluate the quadratic 1*x^2 - 2*x + 1 at the module-level variable x.

    Returns:
        Scalar float32 tensor holding the current loss value.
    """
    coeff_a = tf.constant(1.0)
    coeff_b = tf.constant(-2.0)
    coeff_c = tf.constant(1.0)
    return coeff_a * tf.pow(x, 2) + coeff_b * x + coeff_c


@tf.function
def train(epoch=100):
    """Take ``epoch`` optimizer steps minimizing func() over the global x.

    Args:
        epoch: Number of optimization steps (Python int, fixed at trace time).

    Returns:
        Scalar tensor: func() evaluated after the final step.
    """
    for _ in range(epoch):
        optimizer.minimize(func, [x])
        # FIX: the original additionally did
        #     tf.print("step = {}, y = {}".format(optimizer.iterations, func()))
        # but Python str.format runs once at trace time on *symbolic* tensors,
        # so it printed tensor reprs rather than values. tf.strings.format
        # formats at execution time; the duplicate tf.print of the bare step
        # counter was also dropped as redundant.
        tf.print(tf.strings.format("step = {}, y= {}", (optimizer.iterations, func())))

    return func()


class FakeModel(tf.keras.models.Model):
    """A toy Keras model whose only weight is the scalar ``x``.

    Calling the model broadcasts the quadratic value a*x^2 + b*x + c over the
    input batch, so fitting it with a mean-reducing loss drives ``x`` toward
    the quadratic's minimum.
    """

    def __init__(self, a, b, c):
        super(FakeModel, self).__init__()
        # Quadratic coefficients; held as plain attributes, not trained.
        self.a = a
        self.b = b
        self.c = c

    def build(self, input_shape=None):
        """Create the single trainable scalar weight.

        FIX: Keras invokes ``build(input_shape)`` when the model is first
        called on data; the original ``build(self)`` signature would raise a
        TypeError if Keras ever triggered building itself. The ``None``
        default keeps the existing explicit ``model.build()`` call working.
        """
        self.x = tf.Variable(0.0, name="x", dtype=tf.float32)
        self.built = True

    def call(self, features):
        # Scalar quadratic value, broadcast to the shape of the input batch.
        loss = self.a * tf.pow(self.x, 2) + self.b * self.x + self.c
        return tf.ones_like(features) * loss


def my_loss(y_true, pred):
    """Keras-style loss that ignores the targets and averages the prediction.

    FakeModel's output already *is* the quadratic loss broadcast over the
    batch, so the mean of ``pred`` is the quantity to minimize.
    """
    mean_pred = tf.reduce_mean(pred)
    return mean_pred


def main():
    """Run the three minimization demos and print their results."""
    # Demo 1: hand-rolled GradientTape loop inside a tf.function.
    final_loss = minimizer()
    tf.print(final_loss, x)

    # Demo 2: optimizer.minimize driven from a tf.function.
    train(10)
    tf.print("y = {}, x= {}".format(func(), x))

    # Demo 3: the same problem wrapped as a Keras model and fit with SGD.
    quad_model = FakeModel(tf.constant(1.0), tf.constant(-2.0), tf.constant(1.0))
    quad_model.build()
    quad_model.summary()
    quad_model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=0.01), loss=my_loss)
    quad_model.fit(tf.zeros([100, 2]), tf.ones(100), batch_size=2, epochs=10)

    tf.print("y = {}, x = {}".format(quad_model.call(tf.constant(0.0)), quad_model.x))


# Script entry point: run the demos only when executed directly.
if __name__ == "__main__":
    main()
