---

license: mit
---

Model weights for Fast Style Transfer (Johnson et al., https://arxiv.org/abs/1603.08155). The transformation network architecture these weights correspond to is defined below.

```python
import torch
import torch.nn as nn


class TransformerNetwork(nn.Module):
    """Feedforward Transformation Network without Tanh

    reference: https://arxiv.org/abs/1603.08155
    exact architecture: https://cs.stanford.edu/people/jcjohns/papers/fast-style/fast-style-supp.pdf
    """
    def __init__(self, tanh_multiplier=None):
        super(TransformerNetwork, self).__init__()
        # Downsampling: a 9x9 conv, then two stride-2 3x3 convs (4x spatial reduction)
        self.ConvBlock = nn.Sequential(
            ConvLayer(3, 32, 9, 1),
            nn.ReLU(),
            ConvLayer(32, 64, 3, 2),
            nn.ReLU(),
            ConvLayer(64, 128, 3, 2),
            nn.ReLU()
        )
        # Five residual blocks operating at 128 channels
        self.ResidualBlock = nn.Sequential(
            ResidualLayer(128, 3),
            ResidualLayer(128, 3),
            ResidualLayer(128, 3),
            ResidualLayer(128, 3),
            ResidualLayer(128, 3)
        )
        # Upsampling: two stride-2 transposed convs, then an unnormalized 9x9 conv back to RGB
        self.DeconvBlock = nn.Sequential(
            DeconvLayer(128, 64, 3, 2, 1),
            nn.ReLU(),
            DeconvLayer(64, 32, 3, 2, 1),
            nn.ReLU(),
            ConvLayer(32, 3, 9, 1, norm="None")
        )
        self.tanh_multiplier = tanh_multiplier

    def forward(self, x):
        x = self.ConvBlock(x)
        x = self.ResidualBlock(x)
        x = self.DeconvBlock(x)
        if self.tanh_multiplier is not None:
            # Optionally squash to (-1, 1) and rescale toward an image-like range
            x = self.tanh_multiplier * torch.tanh(x)
        return x


class ConvLayer(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size, stride, norm="instance"):
        super(ConvLayer, self).__init__()
        # Reflection padding avoids the border artifacts of zero padding
        padding_size = kernel_size // 2
        self.pad = nn.ReflectionPad2d(padding_size)

        # Convolution layer
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride)

        # Normalization layer
        if norm == "instance":
            self.norm = nn.InstanceNorm2d(out_channels, affine=True)
        elif norm == "batch":
            self.norm = nn.BatchNorm2d(out_channels, affine=True)
        else:
            self.norm = nn.Identity()

    def forward(self, x):
        x = self.pad(x)
        x = self.conv(x)
        x = self.norm(x)
        return x


class ResidualLayer(nn.Module):
    """
    Deep Residual Learning for Image Recognition
    https://arxiv.org/abs/1512.03385
    """
    def __init__(self, channels=128, kernel_size=3):
        super(ResidualLayer, self).__init__()
        self.conv1 = ConvLayer(channels, channels, kernel_size, stride=1)
        self.relu = nn.ReLU()
        self.conv2 = ConvLayer(channels, channels, kernel_size, stride=1)

    def forward(self, x):
        identity = x                     # preserve residual
        out = self.relu(self.conv1(x))   # 1st conv layer + activation
        out = self.conv2(out)            # 2nd conv layer
        out = out + identity             # add residual
        return out


class DeconvLayer(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size, stride, output_padding, norm="instance"):
        super(DeconvLayer, self).__init__()
        # Transposed convolution: doubles spatial size when stride=2, output_padding=1
        padding_size = kernel_size // 2
        self.conv_transpose = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding_size, output_padding)

        # Normalization layer
        if norm == "instance":
            self.norm = nn.InstanceNorm2d(out_channels, affine=True)
        elif norm == "batch":
            self.norm = nn.BatchNorm2d(out_channels, affine=True)
        else:
            self.norm = nn.Identity()

    def forward(self, x):
        x = self.conv_transpose(x)
        out = self.norm(x)
        return out
```
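
A minimal loading-and-inference sketch follows. The checkpoint filename (`transformer_weights.pth`), the image filenames, and the 0-255 input scaling are illustrative assumptions, not documented properties of this repository; many fast-style-transfer training scripts feed raw 0-255 tensors, but check how these weights were actually trained before relying on that.

```python
import torch
from PIL import Image
from torchvision import transforms

# Hypothetical checkpoint name -- substitute the actual weights file from this repo.
model = TransformerNetwork()
model.load_state_dict(torch.load("transformer_weights.pth", map_location="cpu"))
model.eval()

# Assumed preprocessing: raw 0-255 float tensors, as in many fast-style-transfer setups.
to_tensor = transforms.Compose([
    transforms.ToTensor(),                   # HWC uint8 -> CHW float in [0, 1]
    transforms.Lambda(lambda t: t * 255.0),  # rescale to [0, 255]
])

image = Image.open("content.jpg").convert("RGB")
x = to_tensor(image).unsqueeze(0)            # add batch dimension: (1, 3, H, W)

with torch.no_grad():
    y = model(x)                             # stylized output

# Clamp to a displayable range and save.
out = y.squeeze(0).clamp(0, 255).byte().permute(1, 2, 0).numpy()
Image.fromarray(out).save("stylized.jpg")
```

If the checkpoint was trained with a non-None `tanh_multiplier`, pass the same value to the constructor so inference matches training.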