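# Bayesian neural network (BNN) regression model for wine quality, built with
# TensorFlow Probability: DenseVariational hidden layers model weight
# uncertainty, and an IndependentNormal head outputs a predictive distribution.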
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
import tensorflow_probability as tfp

def load_bnn_model():
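    """Rebuild the probabilistic BNN architecture for the wine-quality
    features below and load pretrained weights from 'bnn_wine_model.h5'.
    The network must be re-created exactly as it was trained so that the
    saved weights match its layers."""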
    FEATURE_NAMES = [
        "fixed acidity",
        "volatile acidity",
        "citric acid",
        "residual sugar",
        "chlorides",
        "free sulfur dioxide",
        "total sulfur dioxide",
        "density",
        "pH",
        "sulphates",
        "alcohol",
    ]

    hidden_units = [8, 8]
    learning_rate = 0.001
    def create_model_inputs():
        inputs = {}
        for feature_name in FEATURE_NAMES:
            inputs[feature_name] = layers.Input(
                name=feature_name, shape=(1,), dtype=tf.float32
            )
        return inputs

    # Define the prior weight distribution as a Normal with mean=0 and stddev=1.
    # Note that, in this example, the prior distribution is not trainable:
    # its parameters are fixed.
    def prior(kernel_size, bias_size, dtype=None):
        n = kernel_size + bias_size
        prior_model = keras.Sequential(
            [
                tfp.layers.DistributionLambda(
                    lambda t: tfp.distributions.MultivariateNormalDiag(
                        loc=tf.zeros(n), scale_diag=tf.ones(n)
                    )
                )
            ]
        )
        return prior_model


    # Define the variational posterior weight distribution as a multivariate
    # Gaussian. Note that the learnable parameters of this distribution are
    # the means, variances, and covariances.
    def posterior(kernel_size, bias_size, dtype=None):
        n = kernel_size + bias_size
        posterior_model = keras.Sequential(
            [
                tfp.layers.VariableLayer(
                    tfp.layers.MultivariateNormalTriL.params_size(n), dtype=dtype
                ),
                tfp.layers.MultivariateNormalTriL(n),
            ]
        )
        return posterior_model

    def create_probabilistic_bnn_model(train_size):
        inputs = create_model_inputs()
        features = layers.concatenate(list(inputs.values()))
        features = layers.BatchNormalization()(features)

        # Create hidden layers with weight uncertainty using the DenseVariational layer.
        for units in hidden_units:
            features = tfp.layers.DenseVariational(
                units=units,
                make_prior_fn=prior,
                make_posterior_fn=posterior,
                kl_weight=1 / train_size,
                activation="sigmoid",
            )(features)

        # Create a probabilistic output (Normal distribution), and use the `Dense` layer
        # to produce the parameters of the distribution.
        # We set units=2 to learn both the mean and the scale (standard deviation)
        # of the Normal distribution.
        distribution_params = layers.Dense(units=2)(features)
        outputs = tfp.layers.IndependentNormal(1)(distribution_params)

        model = keras.Model(inputs=inputs, outputs=outputs)

        return model

    def negative_loglikelihood(targets, estimated_distribution):
        # The model output is already a tfp distribution (IndependentNormal),
        # so the negative log-likelihood of the targets can be taken directly.
        return -estimated_distribution.log_prob(targets)

    model = create_probabilistic_bnn_model(train_size=4163)  # 4163 = training-set size weighting the KL term
    model.compile(
        optimizer=keras.optimizers.RMSprop(learning_rate=learning_rate),
        loss=negative_loglikelihood,
        metrics=[keras.metrics.RootMeanSquaredError()],
    )
    model.load_weights('bnn_wine_model.h5')
    return model
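
# ---------------------------------------------------------------------------
# Usage sketch (not part of the original model code): a minimal example of
# drawing probabilistic predictions from the loaded model. It assumes
# 'bnn_wine_model.h5' is present on disk and uses dummy zero-valued inputs as
# stand-ins for real wine-quality features. Because the DenseVariational
# layers resample their weights on every forward pass, repeating the pass
# surfaces the epistemic (model) uncertainty on top of the aleatoric
# uncertainty captured by the predicted Normal distribution.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import numpy as np

    # Mirrors FEATURE_NAMES inside load_bnn_model().
    feature_names = [
        "fixed acidity", "volatile acidity", "citric acid", "residual sugar",
        "chlorides", "free sulfur dioxide", "total sulfur dioxide",
        "density", "pH", "sulphates", "alcohol",
    ]

    model = load_bnn_model()

    # Dummy batch of one example; replace with real feature values.
    example = {name: np.zeros((1, 1), dtype="float32") for name in feature_names}

    # The model output is a distribution object: its mean is the point
    # prediction and its stddev the aleatoric uncertainty.
    prediction = model(example)
    print("mean:", prediction.mean().numpy().squeeze())
    print("stddev:", prediction.stddev().numpy().squeeze())

    # The spread of means across repeated passes reflects epistemic uncertainty.
    means = [float(model(example).mean().numpy().squeeze()) for _ in range(10)]
    print("std of means over 10 passes:", np.std(means))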