import mindspore as ms
from mindspore import nn, ops, dataset
import numpy as np
from utils import split_set,MAPE
from mindvision.engine.callback import LossMonitor


# Run on CPU with static-graph compilation: GRAPH_MODE traces each Cell's
# construct() into a compiled graph rather than executing it eagerly.
ms.context.set_context(device_target='CPU',mode = ms.GRAPH_MODE)

class Block(nn.Cell):
    """Basic 1-D residual block: two 3-tap Conv1d+ReLU layers plus a skip
    connection. When the channel count changes, the identity path is
    projected with a third 3-tap convolution so the shapes match.

    Args:
        in_dim: number of input channels.
        out_dim: number of output channels.
        auto_prefix, flags: forwarded to ``nn.Cell``.
    """

    def __init__(self, in_dim, out_dim, auto_prefix=True, flags=None):
        super().__init__(auto_prefix, flags)
        self.in_dim = in_dim
        self.out_dim = out_dim

        self.conv1 = nn.SequentialCell(
            nn.Conv1d(self.in_dim, self.out_dim, 3, pad_mode='pad', padding=1),
            nn.ReLU()
            )
        self.conv2 = nn.SequentialCell(
            nn.Conv1d(self.out_dim, self.out_dim, 3, pad_mode='pad', padding=1),
            nn.ReLU()
            )

        # Projection for the skip path when channel counts differ.
        # BUGFIX: always define the attribute — the original left
        # `self.shortcut` undefined when in_dim == out_dim, a latent
        # AttributeError for any code that inspects the cell.
        if in_dim != out_dim:
            self.shortcut = nn.Conv1d(self.in_dim, self.out_dim, 3, pad_mode='pad', padding=1)
        else:
            self.shortcut = None

    def construct(self, *inputs):
        identity = inputs[0]
        out = self.conv1(identity)
        out = self.conv2(out)

        # Project the skip path only when the channel count changed.
        # Consistency fix: the original mixed `x = x + ...` with an
        # in-place `x += ...`; use one functional add in both cases.
        if self.in_dim != self.out_dim:
            identity = self.shortcut(identity)

        return out + identity


class ResNet_18(nn.Cell):
    """GRU front-end followed by a ResNet-18-style 1-D convolutional
    regressor that emits one scalar per sample.

    Pipeline: (batch, seq, 1) -> GRU -> (batch, seq, hidden) -> transpose
    to channels-first -> conv stem -> four residual stages -> average-pool
    over the time axis -> batch-normed MLP head -> Dense(128, 1).

    Args:
        depth: kernel size of the final average pool; presumably equals the
            sequence length reaching the pool — TODO confirm with callers.
        hidden: GRU hidden size, i.e. channel count fed to the conv stem.
        auto_prefix, flags: forwarded to ``nn.Cell``.
    """

    def __init__(self, depth=4, hidden=128, auto_prefix=True, flags=None):
        super().__init__(auto_prefix, flags)
        self.depth = depth
        self.hidden = hidden

        # Single-layer GRU lifting the scalar series to `hidden` channels.
        self.rnn = nn.GRU(1, hidden, 1, batch_first=True)

        # Stem: 7-tap conv (stride 1, 'pad' mode, padding 3) down to 64 ch.
        self.conv1 = nn.SequentialCell(
            nn.Conv1d(hidden, 64, 7, 1, 'pad', 3),
            nn.ReLU(),
            )

        # Four residual stages mirroring ResNet-18's channel progression
        # (64 -> 128 -> 256 -> 512, two blocks each).
        self.layer1 = nn.SequentialCell(
            Block(64, 64),
            Block(64, 64)
        )

        self.layer2 = nn.SequentialCell(
            Block(64, 128),
            Block(128, 128)
        )

        self.layer3 = nn.SequentialCell(
            Block(128, 256),
            Block(256, 256)
        )
        self.layer4 = nn.SequentialCell(
            Block(256, 512),
            Block(512, 512)
        )

        # Pool away the remaining time axis (kernel == depth).
        self.pool = nn.AvgPool1d(self.depth)

        # MLP head; BatchNorm1d/Dense expect 2-D (batch, features) input.
        self.fc = nn.SequentialCell(
            nn.BatchNorm1d(512),
            nn.Dense(512, 256),
            nn.ReLU(),
            nn.BatchNorm1d(256),
            nn.Dense(256, 128),
            nn.ReLU()
        )

        self.out = nn.Dense(128, 1)

    def construct(self, *inputs):
        x, _ = self.rnn(*inputs)      # (batch, seq, hidden)
        x = x.transpose(0, 2, 1)      # channels-first for Conv1d

        x = self.conv1(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        x = self.pool(x)              # (batch, 512, 1)

        # BUGFIX: squeeze only the pooled time axis. The original
        # `x.squeeze()` dropped *every* size-1 dimension, so a batch of
        # size 1 collapsed (1, 512, 1) to (512,), breaking BatchNorm1d.
        x = self.fc(x.squeeze(2))

        x = self.out(x)

        return x


# Module-level construction so other modules can `import` and reuse
# `net` / `model` directly; training itself is gated under __main__.
net = ResNet_18()
loss_fn = nn.MSELoss()

# Learning-rate schedule with natural exponential decay.
# NOTE(review): positional args presumably map to (learning_rate=1e-2,
# decay_rate=0.98, total_step=280, step_per_epoch=28, decay_epoch=20,
# is_stair=True) — confirm against the MindSpore API docs.
lr_sche = nn.natural_exp_decay_lr(1e-2,0.98,280,28,20,True)
optim = nn.optim.SGD(net.trainable_params(),learning_rate = lr_sche, momentum=0.9)

# High-level training wrapper; MAPE is the project-local metric from utils.
model = ms.Model(net,loss_fn,optim,metrics={"MAE":nn.MAE(),"MAPE":MAPE()})

if __name__ == "__main__":
    # Split the data with the project helper, then wrap each half in a
    # batched GeneratorDataset (training side is shuffled).
    train_split, test_split = split_set()
    train_ds = dataset.GeneratorDataset(train_split, ['data', 'target'], shuffle=True).batch(256)
    test_ds = dataset.GeneratorDataset(test_split, ['data', 'target']).batch(256)
    sets = {"train": train_ds, "test": test_ds}

    # 10 epochs of training, logging the loss every 10 steps.
    model.train(10, sets['train'], callbacks=LossMonitor(10))

    # Evaluate on the held-out set with the configured metrics.
    out = model.eval(sets['test'], callbacks=LossMonitor(10))


    


        
        
