# coding=utf-8
# encoding:utf-8
"""
author ： duanxxnj@163.com
time : 2016-06-21-09-21
http://blog.csdn.net/daunxx/article/details/51725086
贝叶斯脊回归
这里在一个自己生成的数据集合上测试贝叶斯脊回归

贝叶斯脊回归和最小二乘法(OLS)得到的线性模型的参数是有一定的差别的
相对于最小二乘法(OLS)二样，贝叶斯脊回归得到的参数比较接近于0

贝叶斯脊回归的先验分布是参数向量的高斯分布

"""
import datetime
import time
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from scipy import stats
from scipy.stats import norm

from sklearn.linear_model import BayesianRidge, LinearRegression

###############################################################################
# 随机函数的种子
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures

# Seed the RNG from the wall clock, so each run gets a different data set.
np.random.seed(int(time.time()) % 100)

# As many features as samples: 100 of each.
n_samples, n_features = 100, 100

# Design matrix drawn from a standard Gaussian.
X = np.random.randn(n_samples, n_features)
print(X)

# Weight prior: zero-mean Gaussian with precision alpha = 4.
alpha_ = 4.
w = np.zeros(n_features)

# Pick 10 feature indices at random to act as the "relevant" features.
# NOTE: randint samples with replacement, so indices may repeat and fewer
# than 10 distinct weights may end up non-zero.
relevant_features = np.random.randint(0, n_features, 10)
print("relevant_features", relevant_features)

# Draw each relevant weight from the prior N(0, 1/alpha).
for idx in relevant_features:
    w[idx] = stats.norm.rvs(loc=0, scale=1. / np.sqrt(alpha_))

# Additive observation noise with precision alpha = 50.
alpha_ = 50.
noise = stats.norm.rvs(loc=0, scale=1. / np.sqrt(alpha_), size=n_samples)

# Targets: linear model plus noise.
y = np.dot(X, w) + noise

###############################################################################
print(X)
print(y)

# Plain least-squares baseline for comparison.
ols = LinearRegression()
ols.fit(X, y)

# Bayesian ridge fit; compute_score=True records the log marginal likelihood
# at each iteration of the evidence maximization.
clf = BayesianRidge(compute_score=True)
clf.fit(X, y)

###############################################################################
# Plot and compare the weights recovered by the two methods
# plt.figure(figsize=(6, 5))
# plt.title("Weights of the model")
# plt.plot(clf.coef_, 'b-', label="Bayesian Ridge estimate")
# plt.plot(w, 'g-', label="Ground truth")
# plt.plot(ols.coef_, 'r--', label="OLS estimate (linear regression)")
# plt.xlabel("Features")
# plt.ylabel("Values of the weights")
# plt.legend(loc="best", prop=dict(size=12))
# plt.show()

"""
实战测试1
"""
x = np.arange(0, 1, 0.002)
print(x)
y = norm.rvs(0, size=500, scale=0.1)
print(y)

clf = Pipeline([('poly', PolynomialFeatures(degree=100)),
                ('linear', LinearRegression(fit_intercept=False))])
clf.fit(x[:, np.newaxis], y)
x = np.arange(0, 1.004, 0.002)
y_test = clf.predict(x[:, np.newaxis])

# plt.plot(y)
# plt.plot(y_test)
# plt.show()
# exit()

"""
实战测试2
"""
x = np.arange(0, 1, 0.002)
print(x)
y = norm.rvs(0, size=500, scale=0.1)
print(y)

clf = Pipeline([  # ('poly', PolynomialFeatures(degree=10)),
                ('linear', BayesianRidge(compute_score=True, fit_intercept=True))])
clf.fit(x[:, np.newaxis], y)
x = np.arange(0, 1.004, 0.002)
y_test = clf.predict(x[:, np.newaxis])

plt.plot(y)
plt.plot(y_test)
plt.show()
exit(0)

# NOTE(review): unreachable in practice — exit(0) above ends the script first.

# Quadratic-feature demo: rows are [1, t, t**2] for t = 0..3, targets are t,
# then predict at an arbitrary feature vector (extrapolation).
clf = LinearRegression()
features = [[1, 0, 0],
            [1, 1, 1],
            [1, 2, 4],
            [1, 3, 9]]
targets = [0, 1, 2, 3]
clf.fit(features, targets)
result = clf.predict([[1, -10, 6]])
print(result)

# Identity-line demo: y == x on 0..6, then extrapolate far outside the
# training range at x = 89.
clf = LinearRegression()
clf.fit([[v] for v in range(7)], list(range(7)))
result = clf.predict([[89]])
print(result)



"""
实战演练测试
by flash胜龙
"""

df = pd.read_csv('data/international-airline-passengers.csv')
df.columns = ['ds', 'y']
df['ds'] = list(map(lambda x: datetime.datetime.strptime(x, '%Y-%m'), df['ds'].values.tolist()))
real_data = df['y'].values.tolist()
df = df.head(int(df['ds'].count()*0.67))

X_train = df.index.values.tolist()

X_train = list(map(lambda x: [x], X_train))
print(X_train)
print(type(X_train))
print('=======================================================')

Y_train = df['y']
print(Y_train)
print(Y_train.dtypes)
print(type(Y_train))
# exit(0)

clf = BayesianRidge(compute_score=True)
clf.fit(X_train, Y_train)

predict = []
for i in range(int(95.0/0.67)):
    y_predict = clf.predict([[i]])
    predict.append(y_predict)
# predict = [predict, predict]

# predict = clf.coef_
print(predict)

plt.plot(predict, 'b-', label="Bayesian Ridge estimate")
plt.plot(real_data)
plt.show()
