import numpy as np
import pandas as pd
import pymc3 as pm
import matplotlib.pyplot as plt

# Load the dataset data1.csv, which is expected to contain feature
# columns X1, X2 and a target column Y.
# (Fixed: the original comment referred to "data.csv" while the code
# reads "data1.csv".)
data = pd.read_csv('data1.csv')

# Extract the feature matrix and the target vector.
X = data[['X1', 'X2']].values
y = data['Y'].values

# Standardize features to zero mean and unit variance.
X_mean = X.mean(axis=0)
X_std = X.std(axis=0)
# Guard against constant columns: a zero standard deviation would
# otherwise produce NaN/inf after the division below.
X_std[X_std == 0] = 1.0
X = (X - X_mean) / X_std

# Define the Bayesian linear regression model.
with pm.Model() as model:
    # Priors: weakly-informative Normal(0, 10) on the regression
    # coefficients (one per feature column) and on the intercept.
    beta = pm.Normal('beta', mu=0, sigma=10, shape=X.shape[1])
    intercept = pm.Normal('intercept', mu=0, sigma=10)
    # Half-Normal prior keeps the observation noise scale positive.
    sigma = pm.HalfNormal('sigma', sigma=1)

    # Linear predictor: mu = X @ beta + intercept, stored as a
    # Deterministic so it is tracked in the trace.
    mu = pm.Deterministic('mu', pm.math.dot(X, beta) + intercept)

    # Likelihood: observed targets are Normal around the linear predictor.
    Y_obs = pm.Normal('Y_obs', mu=mu, sigma=sigma, observed=y)

    # Draw 2000 posterior samples after 1000 tuning steps on 2 cores,
    # returning an ArviZ InferenceData object.
    # NOTE(review): no random_seed is set, so runs are not reproducible —
    # consider passing random_seed=... if determinism matters.
    trace = pm.sample(2000, tune=1000, cores=2, return_inferencedata=True)

# Diagnostics: trace plots for every sampled parameter.
pm.traceplot(trace)
plt.show()

# Posterior summary: means, standard deviations, and 95% HDIs.
posterior_summary = pm.summary(trace, hdi_prob=0.95)
print(posterior_summary)