import tensorflow as tf
import numpy as np
import sys
import os
import random
from python_ai.common.xcommon import *
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split

# Seed every RNG in play (Python, NumPy, TensorFlow) for reproducible runs.
random.seed(777)
np.random.seed(777)
tf.random.set_seed(777)
# Script filename is reused below as part of the TensorBoard log-dir name.
filename = os.path.basename(__file__)

is_use_cache = True  # NOTE(review): appears unused in this file — confirm before removing
ver = 'v1.0'  # version tag appended to the log directory
n_pos, n_neg = 2000, 2000  # number of positive / negative samples to synthesize
L1 = 200  # units in first hidden layer
L2 = 300  # units in second hidden layer
alpha = 0.01  # RMSprop learning rate
n_epoch = 4
batch_size = 128

# get x and y
def get_rand_cycle(n, x_r, noise_std=3.0):
    """Sample ``n`` 2-D points on a circle of radius ``x_r`` with Gaussian noise.

    Points are placed at uniformly random angles on the circle of radius
    ``x_r`` centred at the origin, then perturbed by isotropic Gaussian
    noise.

    Args:
        n: number of points to generate.
        x_r: circle radius.
        noise_std: standard deviation of the additive noise (default 3.0,
            matching the previously hard-coded value, so existing callers
            are unaffected).

    Returns:
        ndarray of shape (n, 2) holding the noisy (x1, x2) coordinates.
    """
    x_angle = np.random.uniform(0.0, 2.0 * np.pi, n)
    # stack the two coordinate columns into an (n, 2) array
    x = np.stack([x_r * np.cos(x_angle), x_r * np.sin(x_angle)], axis=1)
    x += np.random.normal(0.0, noise_std, [n, 2])
    return x


# Synthesize the two classes: positives on a radius-20 ring, negatives on a
# radius-5 ring (both blurred by Gaussian noise) — the decision boundary is
# roughly a circle, so the problem is not linearly separable.
x_pos = get_rand_cycle(n_pos, 20.)
x_neg = get_rand_cycle(n_neg, 5.)
x = np.concatenate([x_pos, x_neg], axis=0, dtype=np.float32)

# Labels: 1 for the outer ring, 0 for the inner ring.
y_pos = np.ones([n_pos, 1])
y_neg = np.zeros([n_neg, 1])
y = np.concatenate([y_pos, y_neg], axis=0, dtype=np.float32)

# 70% train / 30% test, then carve 10% of the train portion off as validation.
x_train, x_test, y_train, y_test = train_test_split(x, y, train_size=0.7, random_state=777)
x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, train_size=0.9, random_state=777)
m_train, n = x_train.shape

# Shuffled, batched, prefetching input pipeline over the training split.
ds = tf.data.Dataset.from_tensor_slices((x_train, y_train))\
    .shuffle(buffer_size=m_train)\
    .batch(batch_size=batch_size)\
    .prefetch(buffer_size=tf.data.experimental.AUTOTUNE)

# visualize x and y
# Scatter the raw data in the first panel of a 2x2 subplot grid; spn tracks
# the current subplot slot and is advanced again by visualize_pred() below.
spr = 2  # subplot rows
spc = 2  # subplot cols
spn = 0  # current subplot number (incremented before each use)
plt.figure(figsize=[12, 12])
spn += 1
plt.subplot(spr, spc, spn)
plt.scatter(x_pos[:, 0], x_pos[:, 1], s=1, color='r', label='positive')
plt.scatter(x_neg[:, 0], x_neg[:, 1], s=1, color='b', label='negative')
plt.legend()
plt.title('target')


# model
class MyBinClf(tf.Module):
    """Three-layer MLP binary classifier: ReLU, ReLU, single sigmoid output."""

    def __init__(self, n_units, name='my_bin_clf', **kwargs):
        """Build the layers.

        ``n_units[0]`` and ``n_units[1]`` size the two hidden layers; the
        output layer always has exactly one sigmoid unit.
        """
        super().__init__(name=name, **kwargs)
        self.dense1 = tf.keras.layers.Dense(n_units[0], tf.nn.relu)
        self.dense2 = tf.keras.layers.Dense(n_units[1], tf.nn.relu)
        self.dense3 = tf.keras.layers.Dense(1, tf.nn.sigmoid)

    def __call__(self, x):
        """Forward pass: returns P(y=1 | x), shape (batch, 1)."""
        return self.dense3(self.dense2(self.dense1(x)))

    @staticmethod
    def loss(y_true, y_pred):
        """Mean binary cross-entropy; predictions are clipped away from
        0 and 1 so the logarithms stay finite."""
        eps = 1e-7
        p = tf.clip_by_value(y_pred, eps, 1 - eps)
        pos_term = y_true * tf.math.log(p)
        neg_term = (1 - y_true) * tf.math.log(1 - p)
        return -tf.reduce_mean(pos_term + neg_term)

    @staticmethod
    def acc(y_true, y_pred):
        """Fraction of samples where the 0.5-thresholded prediction
        matches the 0/1 label."""
        hits = tf.equal(y_true > 0.5, y_pred > 0.5)
        return tf.reduce_mean(tf.cast(hits, tf.float32))


# Instantiate the model and attach an RMSprop optimizer to it.
# NOTE(review): n_units[2] (the trailing 1) is never read — the output-layer
# width is hard-coded to 1 inside MyBinClf.__init__.
model = MyBinClf([L1, L2, 1])
model.opt = tf.keras.optimizers.RMSprop(learning_rate=alpha)


# gradient descent step
@tf.function
def train_step(bx, by, vx, vy):
    """Run one optimizer step on the batch (bx, by) and report metrics.

    Returns (train loss, train acc, validation loss, validation acc);
    the validation metrics use the just-updated weights, while the batch
    metrics use the pre-update predictions recorded under the tape.
    """
    with tf.GradientTape() as tape:
        pred = model(bx)
        batch_loss = MyBinClf.loss(by, pred)

    # The optimizer computes gradients from the tape and applies the update.
    model.opt.minimize(batch_loss, model.trainable_variables, tape=tape)

    batch_acc = MyBinClf.acc(by, pred)

    val_pred = model(vx)
    return (
        batch_loss,
        batch_acc,
        MyBinClf.loss(vy, val_pred),
        MyBinClf.acc(vy, val_pred),
    )


# train
def train(n_epoch):
    """Run the training loop for ``n_epoch`` epochs over the global dataset ``ds``.

    Writes per-step loss/accuracy scalars (and, once, the autograph trace of
    ``train_step``) to a TensorBoard file writer, and prints progress roughly
    five times per epoch plus a summary line at the end of each epoch.
    """
    fw = tf.summary.create_file_writer('./_log/' + filename + '_' + ver)
    total_batch = int(np.ceil(m_train / batch_size))
    group = int(np.ceil(total_batch / 5))  # print every `group` batches (~5 prints/epoch)

    g_step = -1  # global step counter across all epochs
    for epoch in range(n_epoch):
        i = -1  # batch index within the current epoch
        for bx, by in ds:
            i += 1
            g_step += 1
            if g_step == 0:
                # Start tracing on the very first step so the traced graph of
                # train_step can be exported to TensorBoard just below.
                tf.summary.trace_on()
            loss, acc, vloss, vacc = train_step(bx, by, x_val, y_val)
            with fw.as_default():
                if g_step == 0:
                    tf.summary.trace_export('autograph', step=0)
                tf.summary.scalar('MyBinClf loss', loss, g_step)
                tf.summary.scalar('MyBinClf acc', acc, g_step)
            if i % group == 0:
                print(f'g_step#{g_step + 1}: epoch#{epoch + 1}: batch#{i + 1}: [minimize] loss = {loss}, acc = {acc}, vloss = {vloss}, vacc = {vacc}')

            fw.flush()
        # End-of-epoch summary, unless the last batch already printed one.
        # NOTE(review): if ds ever yielded no batches, i/loss/acc would be
        # unbound here — fine for this script's fixed data, confirm if reused.
        if i % group != 0:
            print(f'g_step#{g_step + 1}: epoch#{epoch + 1}: batch#{i + 1}: loss = {loss}, acc = {acc}, vloss = {vloss}, vacc = {vacc}')


# Kick off training with the module-level epoch count.
train(n_epoch)


def visualize_pred(x, title):
    """Scatter-plot the model's predicted classes for ``x`` in the next
    subplot slot (advances the global subplot counter ``spn``)."""
    global spn
    spn += 1
    plt.subplot(spr, spc, spn)

    probs = tf.reshape(model(x), [-1])
    pos_mask = probs > 0.5
    neg_mask = ~pos_mask

    # Predicted positives in red, predicted negatives in blue.
    for mask, color, label in ((pos_mask, 'r', 'positive'),
                               (neg_mask, 'b', 'negative')):
        plt.scatter(x[mask, 0], x[mask, 1], s=1, color=color, label=label)
    plt.legend()
    plt.title(title)


# Plot the model's predictions for each data split, then render the figure.
# Without an explicit show() a non-interactive run exits before the figure
# is ever displayed.
# NOTE(review): if python_ai.common.xcommon's star-import already handles
# figure display/saving, the plt.show() below is harmless but redundant.
visualize_pred(x_train, 'train')
visualize_pred(x_test, 'test')
visualize_pred(x_val, 'val')
plt.show()