import numpy as np
import mindspore as ms
from mindspore import nn, dataset, ops

# Run eagerly (PyNative) on CPU for this small regression demo.
ms.context.set_context(mode=ms.PYNATIVE_MODE, device_target="CPU")


class Train(nn.Cell):
    """Wrapper cell bundling a network with its optimizer.

    NOTE(review): the original constructor accepted ``net`` and ``optim``
    but silently discarded them, making the class useless; they are now
    stored so a subclass or a future ``construct`` can drive a training
    step. The signature is unchanged.
    """

    def __init__(self, net, optim, auto_prefix=True, flags=None):
        super().__init__(auto_prefix, flags)
        self.net = net      # network whose parameters are being trained
        self.optim = optim  # optimizer applied to the network's parameters

class Net(nn.Cell):
    """Linear model mapping 3 input features to 2 outputs via one Dense layer.

    Computes y = x @ W.T + b, i.e. y_j = a1*x1 + a2*x2 + a3*x3 + b_j.
    """

    def __init__(self, auto_prefix=True, flags=None):
        super().__init__(auto_prefix, flags)
        # Single fully-connected layer: 3 inputs -> 2 outputs.
        self.fc = nn.Dense(3, 2)

    def construct(self, *inputs):
        # Forward pass is just the affine transform of the Dense layer.
        return self.fc(*inputs)

class Myset:
    """Synthetic linear-regression dataset: y = x @ W.T + b, exactly.

    Features are random floats in roughly [0, 11); targets are computed
    from a fixed 2x3 weight matrix and a length-2 bias with no label
    noise, so a linear model can fit them perfectly.

    Args:
        n_samples (int): number of samples to generate. Defaults to 1000
            (the previously hard-coded size), so existing callers are
            unaffected.
    """

    def __init__(self, n_samples=1000) -> None:
        # Features: uniform [0, 10) plus uniform [0, 1) jitter, as float32
        # (MindSpore layers expect float32 inputs).
        self.x = (np.random.rand(n_samples, 3) * 10
                  + np.random.rand(n_samples, 3)).astype(np.float32)

        # Ground-truth linear map (2 outputs from 3 features) and bias.
        self.weights = np.array([[1.3, 2.5, 6.6], [3.2, 3.3, 4.4]])
        self.b = np.array([1.7, 2.2])

        # Targets computed exactly from the linear model (no noise added).
        self.y = (self.x @ self.weights.T + self.b).astype(np.float32)

    def __len__(self):
        # Dataset size = number of generated feature rows.
        return self.x.shape[0]

    def __getitem__(self, idx):
        # Returns one (features, target) pair as float32 arrays.
        return self.x[idx], self.y[idx]


# Data pipeline: wrap the synthetic set, shuffle it, and batch by 64.
sets = dataset.GeneratorDataset(Myset(), column_names=['x', 'y'], shuffle=True).batch(64)

net = Net()

# Mean-squared-error loss with momentum SGD fits the linear targets.
loss_fn = nn.MSELoss()
optims = nn.optim.SGD(net.trainable_params(), learning_rate=1e-3, momentum=0.9)

# High-level Model wrapper; MAE is tracked as an evaluation metric.
model = ms.Model(net, loss_fn, optims, metrics={"MAE": nn.MAE()})


if __name__ == '__main__':
    # Train for 1000 epochs, logging the loss every 50 steps.
    # (Fixed: body was indented with 3 spaces, and callbacks is now the
    # canonical list form.)
    model.train(1000, train_dataset=sets, callbacks=[ms.LossMonitor(50)])




