import sys
sys.path.append('..')
import iris_data
import numpy as np
from sklearn.model_selection import train_test_split

# Our target is to maximize p(y|x),
# which equals p(x, y) / p(x).
# At prediction time x is given,
# so p(y|x) is proportional to p(x, y) = p(x|y) * p(y).
# We estimate p(x|y) and p(y) by MLE:
# y ~ Bernoulli(p), p = sum(y) / len(y)
# x | y ~ N(u, kesi)   (kesi denotes the covariance matrix)
# p(x|y=1) ~ N(u1, kesi)
# p(x|y=0) ~ N(u2, kesi)




def parameter_diff_cov(X, y):
    """
    Fit class-conditional Gaussian parameters by MLE, with a separate
    covariance matrix per class.

    Parameters
    ----------
    X : ndarray of shape (n_samples, n_features)
        Training features.
    y : ndarray of shape (n_samples,)
        Binary labels in {0, 1}.

    Returns
    -------
    p_y : float
        MLE estimate of P(y=1), i.e. mean(y).
    u1 : ndarray
        Mean of the samples with label 1.
    u2 : ndarray
        Mean of the samples with label 0.
    kesi1 : ndarray
        MLE covariance of the samples with label 1.
    kesi2 : ndarray
        MLE covariance of the samples with label 0.
    """
    y = np.asarray(y)
    p_y = y.mean()
    # Row indices of each class.
    idx1 = np.where(y == 1)[0]
    idx0 = np.where(y == 0)[0]
    u1 = X[idx1, :].mean(axis=0)
    u2 = X[idx0, :].mean(axis=0)
    # MLE covariance = scatter matrix / class count.
    # BUG fix: the original returned the raw scatter matrix (sum of outer
    # products), which is the class size times too large.
    d1 = X[idx1, :] - u1
    d0 = X[idx0, :] - u2
    kesi1 = d1.T.dot(d1) / len(idx1)
    kesi2 = d0.T.dot(d0) / len(idx0)

    return p_y, u1, u2, kesi1, kesi2


def p_y_x(X_test, p_y, u1, u2, kesi1, kesi2):
    """
    Predict the label of one sample under the two-Gaussian model.

    Compares p(x|y=1)*p(y=1) against p(x|y=0)*p(y=0); the common
    (2*pi)^(d/2) normalizing factor cancels and is omitted.

    Parameters
    ----------
    X_test : ndarray of shape (n_features,)
        A single sample.
    p_y : float
        Prior P(y=1).
    u1, u2 : ndarray
        Class means for y=1 and y=0.
    kesi1, kesi2 : ndarray
        Class covariance matrices for y=1 and y=0.

    Returns
    -------
    int
        1 if the y=1 class dominates, else 0.
    """
    d1 = X_test - u1
    d0 = X_test - u2
    # BUG fix: the Gaussian exponent requires the INVERSE covariance;
    # the original multiplied by the covariance matrix itself.
    # np.linalg.solve(kesi, d) computes inv(kesi) @ d stably.
    q1 = d1.dot(np.linalg.solve(kesi1, d1))
    q0 = d0.dot(np.linalg.solve(kesi2, d0))
    px_y1 = p_y * np.exp(-0.5 * q1) / np.sqrt(np.linalg.det(kesi1))
    px_y0 = (1 - p_y) * np.exp(-0.5 * q0) / np.sqrt(np.linalg.det(kesi2))

    return 1 if px_y1 > px_y0 else 0
    

def parameter_same_cov(X, y):
    """
    Fit class-conditional Gaussian parameters by MLE, with a single
    pooled covariance matrix shared by both classes.

    Parameters
    ----------
    X : ndarray of shape (n_samples, n_features)
        Training features.
    y : ndarray of shape (n_samples,)
        Binary labels in {0, 1}.

    Returns
    -------
    p_y : float
        MLE estimate of P(y=1), i.e. mean(y).
    u1 : ndarray
        Mean of the samples with label 1.
    u2 : ndarray
        Mean of the samples with label 0.
    kesi : ndarray
        Pooled MLE covariance: (S1 + S0) / n_samples, where S1 and S0
        are the per-class scatter matrices.
    """
    y = np.asarray(y)
    p_y = y.mean()
    # Row indices of each class.
    idx1 = np.where(y == 1)[0]
    idx0 = np.where(y == 0)[0]
    u1 = X[idx1, :].mean(axis=0)
    u2 = X[idx0, :].mean(axis=0)
    d1 = X[idx1, :] - u1
    d0 = X[idx0, :] - u2
    # Scatter matrices are already SUMS over their class's samples.
    s1 = d1.T.dot(d1)
    s0 = d0.T.dot(d0)
    # BUG fix: the original weighted each scatter by its class size again
    # (double-counting); the pooled MLE covariance is simply (S1 + S0) / N.
    # The stray debug print was removed as well.
    kesi = (s1 + s0) / len(X)

    return p_y, u1, u2, kesi


def p_y_x_same(X_test, p_y, u1, u2, kesi):
    """
    Predict the label of one sample under the shared-covariance model.

    Compares p(x|y=1)*p(y=1) against p(x|y=0)*p(y=0); with a shared
    covariance the determinant and (2*pi)^(d/2) factors are common to
    both sides, but the determinant is kept so each side is a proper
    unnormalized density.

    Parameters
    ----------
    X_test : ndarray of shape (n_features,)
        A single sample.
    p_y : float
        Prior P(y=1).
    u1, u2 : ndarray
        Class means for y=1 and y=0.
    kesi : ndarray
        Shared covariance matrix.

    Returns
    -------
    int
        1 if the y=1 class dominates, else 0.
    """
    d1 = X_test - u1
    d0 = X_test - u2
    # BUG fix: the Gaussian exponent requires the INVERSE covariance;
    # the original multiplied by the covariance matrix itself.
    q1 = d1.dot(np.linalg.solve(kesi, d1))
    q0 = d0.dot(np.linalg.solve(kesi, d0))
    det_root = np.sqrt(np.linalg.det(kesi))
    px_y1 = p_y * np.exp(-0.5 * q1) / det_root
    px_y0 = (1 - p_y) * np.exp(-0.5 * q0) / det_root

    return 1 if px_y1 > px_y0 else 0


if __name__ == '__main__':
    # Load the binary iris subset and hold out a quarter for evaluation.
    X, y = iris_data.get_data()
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, random_state=10, test_size=.25)

    ### Per-class covariances (QDA-style model).
    p_y, u1, u2, kesi1, kesi2 = parameter_diff_cov(X_train, y_train)
    res = np.array([p_y_x(x, p_y, u1, u2, kesi1, kesi2) for x in X_test])
    # BUG fix: the original never evaluated this model's accuracy.
    acc_diff = (res.reshape(y_test.shape) == y_test).sum() / len(y_test)
    print('accuracy (per-class covariance):', acc_diff)

    ### Shared covariance (LDA-style model).
    p_y, u1, u2, kesi = parameter_same_cov(X_train, y_train)
    res_same = np.array([p_y_x_same(x, p_y, u1, u2, kesi) for x in X_test])
    # BUG fix: the original computed this accuracy but discarded the result.
    acc_same = (res_same.reshape(y_test.shape) == y_test).sum() / len(y_test)
    print('accuracy (shared covariance):', acc_same)