import numpy as np


def get_data(num, w=5., b=3.):
    """Yield `num` noisy samples of the target curve y = sin(x) * w + b.

    Each sample is a (x, y) pair of shape-(1,) float32 arrays; x is drawn
    uniformly from [-10, 10) and y carries unit-variance Gaussian noise.
    """
    for _ in range(num):
        x_val = np.random.uniform(-10., 10.)
        eps = np.random.normal(0, 1)
        y_val = np.sin(x_val) * w + b + eps
        yield np.array([x_val], dtype=np.float32), np.array([y_val], dtype=np.float32)


import matplotlib.pyplot as plt

# Materialize 100 noisy samples for evaluation and build the ideal target curve.
train_data = list(get_data(100))
x_target_label = np.arange(-10, 10, .1)
y_target_label = np.sin(x_target_label) * 5 + 3.

print(type(x_target_label), type(train_data))

# Unzip the (x, y) pairs into two parallel tuples of shape-(1,) arrays.
x_eval_label, y_eval_label = zip(*train_data)
print(type(x_eval_label), "x_eval_label")

x_eval = np.array(x_eval_label)
y_eval = np.array(y_eval_label)

from mindspore import dataset as ds


def create_dataset(num_data, batch_size=16, repeat_size=1):
    """Wrap the synthetic generator in a batched, repeatable MindSpore dataset.

    Incomplete trailing batches are dropped so every batch has exactly
    `batch_size` samples.
    """
    samples = list(get_data(num_data))
    dataset = ds.GeneratorDataset(samples, column_names=['data', 'label'])
    dataset = dataset.batch(batch_size, drop_remainder=True).repeat(repeat_size)
    return dataset


# Training-pipeline hyperparameters.
data_number = 32000
batch_number = 16
repeat_number = 1  # fixed typo: was "repeat_nuber"

ds_train = create_dataset(data_number, batch_size=batch_number, repeat_size=repeat_number)

step_size = ds_train.get_dataset_size()
# Peek at one batch to confirm the column names and shapes.
dict_datasets = next(ds_train.create_dict_iterator())

print(dict_datasets.keys())
print("x label value shape", dict_datasets['data'].shape)
print('y label value shape:', dict_datasets['label'].shape)

from mindspore import nn
from mindspore.common.initializer import Normal


class NonlinearNet(nn.Cell):
    """MLP approximating the nonlinear target y = sin(x) * w + b.

    Architecture: Dense(1 -> 100) -> Sigmoid -> Dense(100 -> 100) -> Sigmoid
    -> Dense(100 -> 1). Weights and biases use Normal initializers.
    """

    def __init__(self):
        super(NonlinearNet, self).__init__()
        self.fc1 = nn.Dense(1, 100, Normal(), Normal())
        self.fc2 = nn.Dense(100, 100, Normal(), Normal())
        self.fc3 = nn.Dense(100, 1, Normal(), Normal())
        self.sigmoid = nn.Sigmoid()

    def construct(self, x):
        # BUG FIX: the original applied self.fc2 twice in a row, silently
        # re-using the same layer's weights (almost certainly a copy-paste
        # slip, since each declared layer is meant to be used once).
        x = self.sigmoid(self.fc1(x))
        x = self.sigmoid(self.fc2(x))
        fx = self.fc3(x)
        return fx


from mindspore import Tensor

net = NonlinearNet()
x_model_label = np.arange(-10., 10, .1).astype(np.float32)
y_model_label = np.zeros_like(x_model_label)

# Evaluate the (still untrained) network one point at a time; each input is
# reshaped to a (1, 1) batch as nn.Dense expects.
for i in range(len(x_model_label)):
    y_model_label[i] = net(Tensor(np.expand_dims([x_model_label[i]], axis=0))).asnumpy()[0][0]

plt.axis([-11, 11, -5, 15])
plt.scatter(x_eval, y_eval, color='red', s=5, label='eval_data')
# BUG FIX: this line originally re-plotted the target curve
# (x_target_label, y_target_label) while labelling it "predict function";
# the computed model predictions were never shown. Plot them now.
plt.plot(x_model_label, y_model_label, color='blue', label="predict function")
plt.plot(x_target_label, y_target_label, color='green', label='target data')

plt.title('untrained')
plt.legend()
# plt.show()

from mindspore import Tensor, Parameter
from mindspore import nn, ops
from mindspore import dtype as mstype

# Mean-squared-error loss; Adam updates every trainable weight of the net.
loss = nn.MSELoss()
optimizer_adam = nn.Adam(net.trainable_params())


class MyWithLossCell(nn.Cell):
    """Bundle a backbone network with a loss function into one Cell.

    Calling the cell runs the backbone on `data` and returns the scalar
    loss against `label`.
    """

    def __init__(self, backbone, loss_fn):
        super(MyWithLossCell, self).__init__(auto_prefix=False)
        self.backbone = backbone
        self.loss_fn = loss_fn

    def construct(self, data, label):
        prediction = self.backbone(data)
        return self.loss_fn(prediction, label)

    def backbone_network(self):
        # Expose the wrapped network (used for plotting after training).
        return self.backbone


class MyTrainStep(nn.TrainOneStepCell):
    """One training step: forward pass, gradient computation, weight update."""

    def __init__(self, network, optimizer):
        super(MyTrainStep, self).__init__(network, optimizer)
        # get_by_list=True: differentiate w.r.t. an explicit parameter list
        # (self.weights, collected by TrainOneStepCell) rather than the inputs.
        self.grad = ops.GradOperation(get_by_list=True)

    def construct(self, data, label):
        weights = self.weights
        # Forward pass for the reported loss, then a second traced pass to
        # obtain gradients of the loss w.r.t. the weights.
        loss = self.network(data, label)
        grads = self.grad(self.network, weights)(data, label)
        # NOTE(review): optimizer(grads) applies the update; its result is
        # returned alongside the loss as a tuple (trailing comma intended).
        return loss, self.optimizer(grads),


from IPython import display
import matplotlib.pyplot as plt
import time


def plot_model_and_datasets(net, data, loss):
    """Plot eval samples, the net's predictions at those x values, and the target curve.

    `data` is a sequence of (x, y) pairs of shape-(1,) arrays; `loss` is
    shown in the figure title. Blocks on plt.show(), then clears the
    notebook output so the next epoch's figure replaces this one.
    """
    xs, ys = zip(*data)
    n = len(data)

    x_eval = np.array([sample[0] for sample in xs])
    y_eval = np.array([sample[0] for sample in ys])

    x_target = np.arange(-10, 10, 0.1)
    y_target = np.sin(x_target) * 5 + 3
    x_predict = x_eval
    y_predict = np.zeros(n)

    # Query the model one point at a time, batching each input as (1, 1).
    for idx in range(n):
        y_predict[idx] = net(Tensor(np.expand_dims(xs[idx], axis=0))).asnumpy()[0]

    plt.axis([-11, 11, -5, 15])
    plt.scatter(x_eval, y_eval, color='red', s=5, label='eval data')
    plt.scatter(x_predict, y_predict, color='blue', s=5, label='predict data')
    plt.plot(x_target, y_target, color='green', label='target data')

    plt.title(f'loss:{loss}')
    plt.legend()
    plt.show()
    time.sleep(1)
    display.clear_output(wait=True)


loss_func = loss
opt = optimizer_adam
net_with_criterion = MyWithLossCell(net, loss_func)
train_net = MyTrainStep(net_with_criterion, opt)
epoch = 10

for _ in range(epoch):
    for batch in ds_train.create_dict_iterator():
        # Update the weights, then re-evaluate the loss on the same batch
        # with the freshly updated weights.
        train_net(batch['data'], batch['label'])
        train_loss = net_with_criterion(batch['data'], batch['label'])

    # Visualize progress once per epoch using the last batch's loss.
    plot_model_and_datasets(net_with_criterion.backbone_network(), train_data, train_loss)
