File size: 1,339 Bytes
69c590e
fae5def
69c590e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
import tensorflow as tf
from tensorflow.keras.layers import Layer, Dense


def sin_activation(x, omega=30):
    """Sine activation: sin(omega * x).

    omega scales the input frequency before the sine is applied
    (30 is the SIREN-paper default — presumably chosen for that reason;
    TODO confirm against the training setup).
    """
    # tf.sin is the public alias of tf.math.sin — identical op.
    return tf.sin(omega * x)


class AdaIN(Layer):
    """Style-modulation layer in the AdaIN family.

    Expects ``inputs = [x, w]`` in ``call``: a feature map ``x`` and a style
    vector ``w``.  ``w`` is projected by two Dense layers into a per-channel
    scale ``ys`` and bias ``yb``, and the layer returns ``ys * x + yb``.

    NOTE(review): unlike classic AdaIN, ``x`` is not instance-normalized
    here — only the learned affine modulation is applied.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def build(self, input_shapes):
        # input_shapes is a pair: [shape of x, shape of w].
        x_shape = input_shapes[0]
        w_shape = input_shapes[1]

        self.w_channels = w_shape[-1]
        self.x_channels = x_shape[-1]

        # One projection each for the modulation scale and bias; both map
        # the style vector to x's channel count.
        self.dense_1 = Dense(self.x_channels)
        self.dense_2 = Dense(self.x_channels)
        # Mark the layer as built (sets self.built on the Keras base class).
        super().build(input_shapes)

    def call(self, inputs):
        x, w = inputs
        # Reshape to (batch, 1, 1, C) so scale/bias broadcast over the two
        # spatial dimensions — assumes x is rank-4 NHWC; TODO confirm.
        ys = tf.reshape(self.dense_1(w), (-1, 1, 1, self.x_channels))
        yb = tf.reshape(self.dense_2(w), (-1, 1, 1, self.x_channels))
        return ys * x + yb

    def get_config(self):
        # No constructor arguments beyond the base Layer's, so the base
        # config fully describes this layer (the channel counts are
        # re-derived in build()).  Dead commented-out entries removed.
        return super().get_config()


class AdaptiveAttention(Layer):
    """Blend two feature maps with a soft per-element mask.

    Expects ``inputs = [m, a, i]`` in ``call`` and returns
    ``(1 - m) * a + m * i``: where the mask is 1 the output follows ``i``,
    where it is 0 it follows ``a``.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def call(self, inputs):
        mask, feat_a, feat_i = inputs
        # Convex combination of the two inputs, weighted by the mask.
        return (1 - mask) * feat_a + mask * feat_i

    def get_config(self):
        # No extra constructor arguments — the base config is sufficient.
        return super().get_config()