import random
import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import LinearRegression, Lasso, ElasticNet, Ridge
from sklearn.preprocessing import PolynomialFeatures
from sklearn.metrics import r2_score, mean_absolute_error
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import ShuffleSplit
from sklearn.model_selection import LeaveOneOut
from matplotlib.pyplot import MultipleLocator

# Global Matplotlib styling: Arial for all text, including math text, and
# TrueType (Type 42) font embedding so PDF output keeps editable text.
plt.rc('font', family='Arial', size=16)

plt.rcParams.update({
    'mathtext.fontset': 'custom',
    'mathtext.it': 'Arial:italic',
    'mathtext.rm': 'Arial',
    'pdf.fonttype': 42,
})

# Fix the NumPy random state for reproducibility.
np.random.seed(19680801)

# Raw data for four systems (outer index i = 0..3, one sublist per system).
# x[i][j]: the SBVCO structural descriptor for sample j of system i.
x = [[30.47, 10.18, 20.56, 34.92, 32.49, 24.12, 32.89, 32.87],
     [27.11, 6.25, 6.44, 34.18, 7.95, 10.04, 29.50,
         27.40, 34.34, 10.02, 10.19, 32.14, 6.13, 25.84],
     [31.09, 10.32, 25.68, 10.41, 6.83, 6.06, 15.76, 22.22, 11.15, 28.26,
         16.45, 6.36, 5.92, 30.44, 24.00, 22.66, 6.66, 21.98, 9.59],
     [6.23, 20.47, 25.64, 32.45, 6.09, 23.77, 28.12, 32.26, 19.71, 29.03, 6.21, 17.35, 23.28, 10.10]]  # SBVCO

# y[i][j]: the activation energy Ea (eV) for the matching sample in x.
y = [[0.99, 1.37, 1.04, 0.86, 1.00, 1.08, 0.97, 0.78],
     [1.56, 2.52, 1.86, 0.72, 2.25, 2.22, 1.14,
         1.09, 0.63, 1.10, 1.98, 0.61, 2.57, 1.51],
     [0.812, 1.935, 1.524, 1.574, 2.516, 2.599, 1.42, 1.97, 1.54, 0.872,
      2.034, 2.891, 3.017, 0.835, 1.013, 1.421, 2.967, 0.892, 2.63],
     [3.64, 1.22, 1.37, 1.12, 3.61, 1.58, 0.83, 0.66, 2.43, 1.11, 3.39, 1.81, 1.68, 2.49]]  # Ea

# Per-system scalar descriptors (one value per sublist above):
# CFe — presumably the Fe concentration; charge — presumably the Fe charge
# used as the second regression feature below. TODO confirm units/meaning.
CFe = [0, 0.33, 0.4, 0.5]
charge = [0, -0.398, -0.459, -0.582]

# Flatten the per-system lists into one design matrix X (feature pair:
# [SBVCO, Fe charge]) and one target vector Y (Ea). The commented line
# shows the alternative second feature (per-system CFe value).
X, Y = [], []
for xi, yi, q in zip(x, y, charge):
    for descriptor, ea in zip(xi, yi):
        X.append([descriptor, q])
        # X.append([descriptor, CFe value of this system])  # alternative feature
        Y.append(ea)

# Shuffle X and Y with the same permutation by zipping them together.
# BUGFIX: random.shuffle uses the stdlib RNG, which was never seeded — the
# np.random.seed above does not affect it. Seed it so runs are reproducible,
# matching the stated intent.
random.seed(19680801)
paired = list(zip(X, Y))
random.shuffle(paired)
# zip(*...) yields tuples; convert back to lists.
X, Y = zip(*paired)
X, Y = list(X), list(Y)

# Expand the two raw features into full polynomial feature matrices
# (all powers and cross terms up to the given degree).
X_2 = PolynomialFeatures(2, interaction_only=False).fit_transform(X)
poly = PolynomialFeatures(3, interaction_only=False)
X_3 = poly.fit_transform(X)
print(X_3)  # debug: inspect the degree-3 design matrix
# Row 0 of the figure uses degree-2 features, row 1 degree-3.
Xs = [X_2, X_3]
# 2x4 grid of parity plots (predicted vs. true Ea), shared axes.
abcdefgh = [["(a)", "(b)", "(c)", "(d)"], ["(e)", "(f)", "(g)", "(h)"]]
fig, axs = plt.subplots(2, 4, constrained_layout=True, sharey=True, sharex=True, figsize=(12, 6))

# Same decoration on every panel: y = x reference line, 0-4 eV limits,
# major ticks every 1 eV, and the panel label in the top-left corner.
flat_labels = [lab for row_labels in abcdefgh for lab in row_labels]
for k, ax in enumerate(axs.flat):
    ax.plot([0, 4], [0, 4], c="black", linestyle='-.')
    ax.set_xlim(0, 4)
    ax.xaxis.set_major_locator(MultipleLocator(1))
    ax.set_ylim(0, 4)
    ax.text(0.05, 3.6, flat_labels[k])

fig.supxlabel(r"$True\ E_a (eV)$")
fig.supylabel(r"$Predicted\ E_a (eV)$")

# NOTE: a train/test ShuffleSplit was tried first but wastes too much of
# this small data set, so leave-one-out CV is used below instead.
# Leave-one-out cross-validation with ordinary least squares, one figure row
# per polynomial degree (row 0 -> degree 2, row 1 -> degree 3). LOO is used
# because the data set is too small for a held-out split.
for row in range(2):
    loo = LeaveOneOut()
    # Hoist the array conversions out of the fold loop (they are invariant).
    X_all = np.array(Xs[row])
    Y_all = np.array(Y)
    Y_true, Y_pred = [], []
    for train_index, test_index in loo.split(X_all):
        X_train, X_test = X_all[train_index], X_all[test_index]
        Y_train, Y_test = Y_all[train_index], Y_all[test_index]
        # NOTE: LR deliberately stays bound after the loop; later full-data
        # fits reuse this name.
        LR = LinearRegression()
        y_pred = LR.fit(X_train, Y_train).predict(X_test)
        Y_true.append(Y_test)
        Y_pred.append(y_pred)

    # Score the held-out predictions. BUGFIX: the message previously said
    # "r^2" although the reported value is the mean absolute error.
    mae_lr = mean_absolute_error(Y_true, Y_pred)
    axs[row][2].scatter(Y_true, Y_pred)
    print(LR)
    print("MAE on LOO test data, degree %i : %f" % (row + 2, mae_lr))
    axs[row][2].text(1.7, 0.2, "MAE: %.3f eV" % mae_lr)
    axs[row][2].text(0.5, 3.6, "Ordinary")

# Same leave-one-out evaluation as above, but with Ridge regression
# (L2-regularized) at alpha = 0.1; plotted in column 3.
for row in range(2):
    loo = LeaveOneOut()
    # Hoist the array conversions out of the fold loop (they are invariant).
    X_all = np.array(Xs[row])
    Y_all = np.array(Y)
    Y_true, Y_pred = [], []
    for train_index, test_index in loo.split(X_all):
        X_train, X_test = X_all[train_index], X_all[test_index]
        Y_train, Y_test = Y_all[train_index], Y_all[test_index]
        ridge = Ridge(alpha=.1)
        y_pred = ridge.fit(X_train, Y_train).predict(X_test)
        Y_true.append(Y_test)
        Y_pred.append(y_pred)

    # Score the held-out predictions. BUGFIX: the message previously said
    # "r^2" although the reported value is the mean absolute error.
    mae_ridge = mean_absolute_error(Y_true, Y_pred)
    axs[row][3].scatter(Y_true, Y_pred)
    print(ridge)
    print("MAE on LOO test data, degree %i : %f" % (row + 2, mae_ridge))
    axs[row][3].text(1.7, 0.2, "MAE: %.3f eV" % mae_ridge)
    axs[row][3].text(0.5, 3.6, "Ridge")

# Fit ordinary least squares on ALL the data and report the in-sample
# (training) MAE for the degree-2 and degree-3 feature sets (column 0).
# BUGFIX: previously this relied on the LR instance leaking out of the CV
# loop above; construct it explicitly here.
LR = LinearRegression()
y_pred_lr = LR.fit(X_2, Y).predict(X_2)
mae_lr = mean_absolute_error(Y, y_pred_lr)
axs[0][0].scatter(Y, y_pred_lr)
axs[0][0].text(1.7, 0.2, "MAE: %.3f eV" % mae_lr)
axs[0][0].text(0.5, 3.6, "Ordinary")
print(LR)
# BUGFIX: the message previously said "r^2" although the value is the MAE.
print("MAE on training data : %f" % mae_lr)

y_pred_lr = LR.fit(X_3, Y).predict(X_3)
mae_lr = mean_absolute_error(Y, y_pred_lr)
axs[1][0].scatter(Y, y_pred_lr)
axs[1][0].text(1.7, 0.2, "MAE: %.3f eV" % mae_lr)
axs[1][0].text(0.5, 3.6, "Ordinary")
print(LR)
print("MAE on training data : %f" % mae_lr)

# NOTE: Lasso and ElasticNet variants were tried here as well (hence the
# unused imports at the top of the file) but are no longer plotted.

# Fit Ridge (alpha = 0.5) on ALL the data and report the in-sample
# (training) MAE for both feature sets (column 1).
ridge = Ridge(alpha=.5)
y_pred_ridge = ridge.fit(X_2, Y).predict(X_2)
mae_ridge = mean_absolute_error(Y, y_pred_ridge)
axs[0][1].scatter(Y, y_pred_ridge)
axs[0][1].text(1.7, 0.2, "MAE: %.3f eV" % mae_ridge)
axs[0][1].text(0.5, 3.6, "Ridge")
print(ridge)
# BUGFIX: the message previously said "r^2" although the value is the MAE.
print("MAE on training data : %f" % mae_ridge)

ridge = Ridge(alpha=.5)
y_pred_ridge = ridge.fit(X_3, Y).predict(X_3)
mae_ridge = mean_absolute_error(Y, y_pred_ridge)
axs[1][1].scatter(Y, y_pred_ridge)
axs[1][1].text(1.7, 0.2, "MAE: %.3f eV" % mae_ridge)
axs[1][1].text(0.5, 3.6, "Ridge")
print(ridge)
print("MAE on training data : %f" % mae_ridge)

plt.show()
