import mindspore.nn as nn
import mindspore.common.initializer as init


class DnCNN(nn.Cell):
    """DnCNN denoising network (Zhang et al. 2017), MindSpore implementation.

    Architecture: Conv+ReLU head, ``depth - 2`` Conv+BN+PReLU middle stages,
    and a final Conv projecting back to ``image_channels``.

    Args:
        depth (int): total number of convolutional layers. Default: 17.
        n_channels (int): feature channels of the hidden layers. Default: 64.
        image_channels (int): input/output image channels. Default: 1.
        kernel_size (int): convolution kernel size. Default: 3.
    """

    def __init__(self, depth=17, n_channels=64, image_channels=1, kernel_size=3):
        super(DnCNN, self).__init__()
        padding = 1  # preserves spatial size for kernel_size=3 with pad_mode='pad'
        layers = [
            nn.Conv2d(in_channels=image_channels, out_channels=n_channels,
                      kernel_size=kernel_size, padding=padding,
                      has_bias=True, pad_mode='pad'),
            nn.ReLU(),
        ]
        for _ in range(depth - 2):
            layers.append(nn.Conv2d(in_channels=n_channels, out_channels=n_channels,
                                    kernel_size=kernel_size, padding=padding,
                                    has_bias=False, pad_mode='pad'))
            # NOTE(review): MindSpore's BatchNorm momentum is the complement of
            # PyTorch's (running = momentum*running + (1-momentum)*batch), so
            # momentum=0.05 here corresponds to PyTorch momentum=0.95 — confirm
            # this matches the reference implementation being ported.
            layers.append(nn.BatchNorm2d(n_channels, eps=0.0001, momentum=0.05))
            layers.append(nn.PReLU())
        layers.append(nn.Conv2d(in_channels=n_channels, out_channels=image_channels,
                                kernel_size=kernel_size, padding=padding,
                                has_bias=False, pad_mode='pad'))
        self.dncnn = nn.SequentialCell(layers)
        self._init_weights()

    def construct(self, x):
        """Forward pass: run the input batch through the sequential stack."""
        return self.dncnn(x)

    # private function
    def _init_weights(self):
        """Re-initialize conv weights (He-normal) and BN affine params.

        Bug fix: the previous version assigned ``cell.weight_init`` /
        ``cell.bias_init`` attributes. Those attributes are only consumed when
        the layer constructs its parameters, so assigning them afterwards is a
        silent no-op — ``Parameter.set_data`` with ``init.initializer(...)`` is
        required to actually overwrite the values. Also, MindSpore's
        BatchNorm2d parameters are named ``gamma``/``beta``, not
        ``weight``/``bias``.
        """
        print("init weights begin:")
        for _, cell in self.cells_and_names():
            if isinstance(cell, nn.Conv2d):
                cell.weight.set_data(init.initializer(
                    init.HeNormal(negative_slope=0, mode='fan_in',
                                  nonlinearity='leaky_relu'),
                    cell.weight.shape, cell.weight.dtype))
                if cell.bias is not None:
                    cell.bias.set_data(init.initializer(
                        init.Zero(), cell.bias.shape, cell.bias.dtype))
            elif isinstance(cell, nn.BatchNorm2d):
                # gamma -> 1, beta -> 0: identity affine transform at start.
                cell.gamma.set_data(init.initializer(
                    init.One(), cell.gamma.shape, cell.gamma.dtype))
                cell.beta.set_data(init.initializer(
                    init.Zero(), cell.beta.shape, cell.beta.dtype))