haikookhandor committed 4003f83 (parent: 622fec9): Add application file
app.py
ADDED
@@ -0,0 +1,115 @@
import numpy as np
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpyro
import numpyro.distributions as dist
from numpyro.infer import MCMC, NUTS
from sklearn.datasets import make_regression
from jax import random
import streamlit as st

# Define the model: y ~ Normal(alpha + beta * X, sigma)
def linear_regression(X, y, alpha_prior, beta_prior, sigma_prior):
    alpha = numpyro.sample('alpha', alpha_prior)   # intercept
    beta = numpyro.sample('beta', beta_prior)      # slope
    sigma = numpyro.sample('sigma', sigma_prior)   # observation noise scale
    mean = alpha + beta * X
    numpyro.sample('obs', dist.Normal(mean, sigma), obs=y)  # condition on the observed targets

def run_linear_regression(X, y, alpha_prior, beta_prior, sigma_prior):
    # Run MCMC with the NUTS sampler
    rng_key = random.PRNGKey(0)
    nuts_kernel = NUTS(linear_regression)
    mcmc = MCMC(nuts_kernel, num_warmup=50, num_samples=1000)
    mcmc.run(rng_key, jnp.array(X), jnp.array(y), alpha_prior=alpha_prior, beta_prior=beta_prior, sigma_prior=sigma_prior)

    mcmc.print_summary()

    # Get posterior samples
    samples = mcmc.get_samples()

    # Plot histograms of the posterior samples
    fig, ax = plt.subplots(1, 3, figsize=(12, 4))
    ax[0].hist(samples['alpha'], bins=20, density=True)
    ax[0].set_title('alpha')
    ax[1].hist(samples['beta'], bins=20, density=True)
    ax[1].set_title('beta')
    ax[2].hist(samples['sigma'], bins=20, density=True)
    ax[2].set_title('sigma')
    st.write("The plot of posterior samples is shown below:")
    st.pyplot(fig)
    st.write("The mean of alpha is", np.mean(samples['alpha']))
    st.write("The mean of beta is", np.mean(samples['beta']))
    st.write("The mean of sigma is", np.mean(samples['sigma']))

    st.write("The plot of the predicted line is shown below, using the mean values of alpha and beta (Xβ+α):")

    # Scatter the data and overlay regression lines drawn from the posterior
    fig, ax = plt.subplots(figsize=(8, 6))
    ax.scatter(X, y, color='blue', alpha=0.5, label='data')
    light_color = (1.0, 0.5, 0.5, 0.7)
    for i in range(499):
        alpha_i = samples['alpha'][i]
        beta_i = samples['beta'][i]
        ax.plot(X, alpha_i + beta_i * X, color=light_color)
    # Re-plot one posterior draw with a label so the legend gets a single 'MCMC samples' entry
    alpha_i = samples['alpha'][498]
    beta_i = samples['beta'][498]
    ax.plot(X, alpha_i + beta_i * X, color=light_color, label='MCMC samples')
    # Regression line using the posterior means of alpha and beta
    ax.plot(X, np.mean(samples['alpha']) + np.mean(samples['beta']) * X, color='red', label='mean')
    ax.legend(loc='upper left')
    st.pyplot(fig)

# User Input
st.write("# Bayesian Linear Regression")

""" Prior: p(α) = N(μ0, Σ0), p(β) = N(μ1, Σ1), p(σ) = HalfNormal(Σ2) (Laplace/Cauchy and HalfCauchy variants can be chosen below)"""
""" Likelihood: p(y|X,α,β,σ) = N(Xβ + α, σ)"""
""" Posterior: p(α,β,σ|X,y) ∝ p(y|X,α,β,σ) p(α) p(β) p(σ)"""

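# For reference, the unnormalised log-posterior explored by NUTS for this model is
#   log p(α, β, σ | X, y) = log p(α) + log p(β) + log p(σ) + Σ_i log N(y_i | α + β·x_i, σ) + const.
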
alpha_prior_option = st.selectbox("Choose an option for alpha prior:", ["Normal", "Laplace", "Cauchy"])
if alpha_prior_option == "Normal":
    alpha_loc = st.slider("Select a mean value for prior of alpha(α) (μ0)", -10.0, 10.0, 0.0, 0.1)
    alpha_scale = st.slider("Select a standard deviation value for prior of alpha(α) (Σ0)", 0.01, 10.0, 1.0, 0.1)
    alpha_prior = dist.Normal(alpha_loc, alpha_scale)
elif alpha_prior_option == "Laplace":
    alpha_loc = st.slider("Select a location value for prior of alpha(α) (μ0)", -10.0, 10.0, 0.0, 0.1)
    alpha_scale = st.slider("Select a scale value for prior of alpha(α) (Σ0)", 0.01, 10.0, 1.0, 0.1)
    alpha_prior = dist.Laplace(alpha_loc, alpha_scale)
elif alpha_prior_option == "Cauchy":
    alpha_loc = st.slider("Select a location value for prior of alpha(α) (μ0)", -10.0, 10.0, 0.0, 0.1)
    alpha_scale = st.slider("Select a scale value for prior of alpha(α) (Σ0)", 0.01, 10.0, 1.0, 0.1)
    alpha_prior = dist.Cauchy(alpha_loc, alpha_scale)

beta_prior_option = st.selectbox("Choose an option for beta prior:", ["Normal", "Laplace", "Cauchy"])
if beta_prior_option == "Normal":
    beta_loc = st.slider("Select a mean value for prior of beta(β) (μ1)", -10.0, 10.0, 0.0, 0.1)
    beta_scale = st.slider("Select a standard deviation value for prior of beta(β) (Σ1)", 0.01, 10.0, 1.0, 0.1)
    beta_prior = dist.Normal(beta_loc, beta_scale)
elif beta_prior_option == "Laplace":
    beta_loc = st.slider("Select a location value for prior of beta(β) (μ1)", -10.0, 10.0, 0.0, 0.1)
    beta_scale = st.slider("Select a scale value for prior of beta(β) (Σ1)", 0.01, 10.0, 1.0, 0.1)
    beta_prior = dist.Laplace(beta_loc, beta_scale)
elif beta_prior_option == "Cauchy":
    beta_loc = st.slider("Select a location value for prior of beta(β) (μ1)", -10.0, 10.0, 0.0, 0.1)
    beta_scale = st.slider("Select a scale value for prior of beta(β) (Σ1)", 0.01, 10.0, 1.0, 0.1)
    beta_prior = dist.Cauchy(beta_loc, beta_scale)

sigma_prior_option = st.selectbox("Choose an option for sigma prior:", ["HalfNormal", "HalfCauchy"])
if sigma_prior_option == "HalfNormal":
    sigma_scale = st.slider("Select a scale value for prior of sigma(σ) (Σ2)", 0.01, 10.0, 1.0, 0.1)
    sigma_prior = dist.HalfNormal(sigma_scale)
elif sigma_prior_option == "HalfCauchy":
    sigma_scale = st.slider("Select a scale value for prior of sigma(σ) (Σ2)", 0.01, 10.0, 1.0, 0.1)
    sigma_prior = dist.HalfCauchy(sigma_scale)

rng_key = random.PRNGKey(0)
# Generate a synthetic 1-D regression dataset and flatten X to shape (50,)
X, y = make_regression(n_samples=50, n_features=1, noise=10.0, random_state=0)
X = X.reshape(50)

if alpha_prior and beta_prior and sigma_prior:
    run_linear_regression(X, y, alpha_prior, beta_prior, sigma_prior)
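Since this commit adds only app.py, the lines below are a minimal sketch of how the Space could be tried locally; they assume a standard Python environment with the dependencies implied by the imports above (a deployed Space would normally pin them in a requirements.txt):

    pip install streamlit numpyro jax jaxlib numpy matplotlib scikit-learn
    streamlit run app.py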