from sklearn import datasets,linear_model
from sklearn import metrics
from sklearn import tree
from sklearn.model_selection import train_test_split
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.linear_model import LinearRegression
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_predict
from sklearn.naive_bayes import MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.neural_network import BernoulliRBM
from sklearn.neural_network import MLPClassifier
from sklearn.neural_network import MLPRegressor
from sklearn.externals import joblib

# Linear regression
def mx_line(train_x, train_y):
    """Fit an ordinary least-squares linear regression model and return it."""
    # Estimator.fit returns the estimator itself, so fit-and-return chains.
    return LinearRegression().fit(train_x, train_y)

# Logistic regression
def mx_log(train_x, train_y):
    """Fit a logistic-regression classifier (sklearn defaults) and return it."""
    return LogisticRegression().fit(train_x, train_y)

# Multinomial naive Bayes
def mx_bayes(train_x, train_y):
    """Fit a multinomial naive-Bayes classifier and return it.

    Note: MultinomialNB expects non-negative feature values (e.g. counts).
    """
    return MultinomialNB().fit(train_x, train_y)

# K-nearest-neighbors
def mx_knn(train_x, train_y):
    """Fit a k-nearest-neighbors classifier (default k=5) and return it."""
    return KNeighborsClassifier().fit(train_x, train_y)

# Random forest
def mx_forest(train_x, train_y):
    """Fit a random-forest classifier with 8 trees and return it."""
    # 8 estimators keeps training fast; kept identical to the original choice.
    return RandomForestClassifier(n_estimators=8).fit(train_x, train_y)

# Decision tree
def mx_tree(train_x, train_y):
    """Fit a CART decision-tree classifier and return it."""
    return tree.DecisionTreeClassifier().fit(train_x, train_y)

# Gradient-boosted decision trees (GBDT)
def mx_gbdt(train_x, train_y):
    """Fit a gradient-boosting classifier with 200 boosting stages and return it."""
    return GradientBoostingClassifier(n_estimators=200).fit(train_x, train_y)

# Support vector machine
def mx_svm(train_x, train_y):
    """Fit a support-vector classifier with sklearn defaults (RBF kernel) and return it."""
    return SVC().fit(train_x, train_y)

# SVM with cross-validated hyper-parameter search (auto-tuning)
def mx_svm_cross(train_x, train_y):
    """Grid-search C and gamma for an RBF SVM and return the best fitted model.

    Runs GridSearchCV over a small C/gamma grid; the returned estimator is an
    SVC(kernel='rbf', probability=True) fitted with the best parameters found.
    """
    param_grid = {
        'C': [1e-3, 1e-2, 1e-1, 1, 10, 100, 1000],
        'gamma': [0.001, 0.0001],
    }
    grid_search = GridSearchCV(
        SVC(kernel='rbf', probability=True),
        param_grid,
        n_jobs=1,
        verbose=1,
    )
    grid_search.fit(train_x, train_y)
    # GridSearchCV's default refit=True already retrains the best-parameter
    # estimator on the full training set and stores it as best_estimator_.
    # The original code re-read the params and fitted a second SVC by hand,
    # training the winning model twice for no benefit.
    return grid_search.best_estimator_

# MLP neural-network classifier
def mx_MLP(train_x, train_y):
    """Fit a multi-layer-perceptron classifier (sklearn defaults) and return it."""
    return MLPClassifier().fit(train_x, train_y)

# MLP neural-network regressor
def mx_MLP_reg(train_x, train_y):
    """Fit a multi-layer-perceptron regressor (sklearn defaults) and return it."""
    return MLPRegressor().fit(train_x, train_y)





