from torch import nn as nn
from torch.nn import functional as F

from mynn.utils.registry import MODEL_REGISTRY
from .model_util import make_layer, default_init_weights, ResidualBlockNoBN


@MODEL_REGISTRY.register()
class FineNet(nn.Module):
    """Shallow three-layer refinement CNN.

    Maps a ``num_in_ch``-channel input to a ``num_out_ch``-channel output
    through two LeakyReLU-activated 3x3 convolutions followed by a final
    3x3 projection convolution. Spatial resolution is preserved throughout
    (stride 1, padding 1).

    Args:
        num_in_ch (int): Number of input channels. Default: 6.
        num_out_ch (int): Number of output channels. Default: 3.
        num_feat (int): Number of intermediate feature channels. Default: 64.
    """

    def __init__(self, num_in_ch=6, num_out_ch=3, num_feat=64):
        super().__init__()
        # Three stride-1, padding-1 convolutions: lift to the feature width,
        # transform, then project down to the output channel count.
        self.conv_first = nn.Conv2d(num_in_ch, num_feat, 3, 1, 1)
        self.conv_middle = nn.Conv2d(num_feat, num_feat, 3, 1, 1)
        self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1)

        # Shared activation between convolutions.
        self.lrelu = nn.LeakyReLU(negative_slope=0.1)

        # Scale the default Kaiming init by 0.1 for all three convolutions.
        default_init_weights([self.conv_first, self.conv_middle, self.conv_last], 0.1)

    def forward(self, x):
        """Run the refinement network.

        Args:
            x (Tensor): Input of shape (N, num_in_ch, H, W).

        Returns:
            Tensor: Output of shape (N, num_out_ch, H, W).
        """
        hidden = self.lrelu(self.conv_first(x))
        hidden = self.lrelu(self.conv_middle(hidden))
        return self.conv_last(hidden)
