
Model weights for Fast Style Transfer (Johnson et al., https://arxiv.org/abs/1603.08155). The transformer network these weights are meant to be loaded into is defined below.
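A minimal loading sketch follows, assuming the classes below are saved locally as transformer_net.py and the downloaded checkpoint is named style.pth (both names are illustrative, not part of this model card):

import torch

# Assumptions: transformer_net.py contains the TransformerNetwork class
# defined below, and "style.pth" is the downloaded weight file; some
# checkpoints wrap the state dict in an outer dict, so inspect the file
# if load_state_dict fails.
from transformer_net import TransformerNetwork

model = TransformerNetwork()
state_dict = torch.load("style.pth", map_location="cpu")
model.load_state_dict(state_dict)
model.eval()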


import torch
import torch.nn as nn


class TransformerNetwork(nn.Module):
    """Feedforward Transformation Network without Tanh
    reference: https://arxiv.org/abs/1603.08155 
    exact architecture: https://cs.stanford.edu/people/jcjohns/papers/fast-style/fast-style-supp.pdf
    """
    def __init__(self, tanh_multiplier=None):
        super(TransformerNetwork, self).__init__()
        self.ConvBlock = nn.Sequential(
            ConvLayer(3, 32, 9, 1),
            nn.ReLU(),
            ConvLayer(32, 64, 3, 2),
            nn.ReLU(),
            ConvLayer(64, 128, 3, 2),
            nn.ReLU()
        )
        self.ResidualBlock = nn.Sequential(
            ResidualLayer(128, 3), 
            ResidualLayer(128, 3), 
            ResidualLayer(128, 3), 
            ResidualLayer(128, 3), 
            ResidualLayer(128, 3)
        )
        self.DeconvBlock = nn.Sequential(
            DeconvLayer(128, 64, 3, 2, 1),
            nn.ReLU(),
            DeconvLayer(64, 32, 3, 2, 1),
            nn.ReLU(),
            ConvLayer(32, 3, 9, 1, norm="None")
        )
        self.tanh_multiplier = tanh_multiplier

    def forward(self, x):
        x = self.ConvBlock(x)
        x = self.ResidualBlock(x)
        x = self.DeconvBlock(x)
        if self.tanh_multiplier is not None:
            # scale the tanh output to roughly [-tanh_multiplier, tanh_multiplier]
            x = self.tanh_multiplier * torch.tanh(x)
        return x
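
# Architecture note: ConvBlock downsamples spatially by a factor of 4
# (two stride-2 convolutions), the five ResidualLayers keep the 128-channel
# feature map at that size, and DeconvBlock upsamples by 4 again, so the
# stylized output matches the input resolution when the input height and
# width are multiples of 4.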

class ConvLayer(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size, stride, norm="instance"):
        super(ConvLayer, self).__init__()
        # Padding Layers
        padding_size = kernel_size // 2
        self.pad = nn.ReflectionPad2d(padding_size)

        # Convolution Layer
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride)

        # Normalization Layers
        if norm == "instance":
            self.norm = nn.InstanceNorm2d(out_channels, affine=True)
        elif norm == "batch":
            self.norm = nn.BatchNorm2d(out_channels, affine=True)
        else:
            self.norm = nn.Identity()

    def forward(self, x):
        x = self.pad(x)
        x = self.conv(x)
        x = self.norm(x)
        return x

class ResidualLayer(nn.Module):
    """
    Deep Residual Learning for Image Recognition
    https://arxiv.org/abs/1512.03385
    """
    def __init__(self, channels=128, kernel_size=3):
        super(ResidualLayer, self).__init__()
        self.conv1 = ConvLayer(channels, channels, kernel_size, stride=1)
        self.relu = nn.ReLU()
        self.conv2 = ConvLayer(channels, channels, kernel_size, stride=1)

    def forward(self, x):
        identity = x                     # preserve residual
        out = self.relu(self.conv1(x))   # 1st conv layer + activation
        out = self.conv2(out)            # 2nd conv layer
        out = out + identity             # add residual
        return out

class DeconvLayer(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size, stride, output_padding, norm="instance"):
        super(DeconvLayer, self).__init__()

        # Transposed Convolution 
        padding_size = kernel_size // 2
        self.conv_transpose = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding_size, output_padding)

        # Normalization Layers
        if norm == "instance":
            self.norm = nn.InstanceNorm2d(out_channels, affine=True)
        elif norm == "batch":
            self.norm = nn.BatchNorm2d(out_channels, affine=True)
        else:
            self.norm = nn.Identity()

    def forward(self, x):
        out = self.conv_transpose(x)
        out = self.norm(out)
        return out
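
Continuing the loading sketch above, a hedged inference example: the output matches the input resolution when the input height and width are multiples of 4. The 0-255 pixel range used here is an assumption (it is common for this architecture) and should be checked against the training code for these particular weights; the image filenames are placeholders.

import torch
from PIL import Image
from torchvision import transforms

# Assumption: the network expects pixel values in [0, 255]; adjust the
# scaling if the training pipeline for these weights differs.
preprocess = transforms.Compose([
    transforms.ToTensor(),                 # float tensor in [0, 1], shape (3, H, W)
    transforms.Lambda(lambda t: t * 255),  # rescale to [0, 255]
])

image = Image.open("content.jpg").convert("RGB")
x = preprocess(image).unsqueeze(0)         # add batch dimension -> (1, 3, H, W)

with torch.no_grad():
    y = model(x)                           # `model` from the loading sketch above

# Clamp to a valid pixel range and convert back to a PIL image
y = y.squeeze(0).clamp(0, 255).div(255)
transforms.ToPILImage()(y).save("stylized.jpg")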