"""

"""

import numpy as np
import seaborn as sns
import pandas as pd
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt

def error_analysis(pred_y, true_y, labels=None, show_plot=True):
    """Compute per-class precision, recall and F1-score and report them.

    Builds a confusion matrix (rows = predicted class, columns = true
    class), prints the overall accuracy and a per-class metric table,
    and optionally renders the confusion matrix as a heatmap.

    Args:
    - pred_y: :numpy.array: (n,) predicted labels.
    - true_y: :numpy.array: (n,) ground-truth labels.
    - labels: :list: class labels for the task; defaults to [0, 1, 2, 3].
      Labels must be usable directly as matrix indices (0..k-1), as in
      the original indexing scheme.
    - show_plot: :bool: when True (default), draw the heatmap.

    Returns:
    - :pandas.DataFrame: one row per class with columns
      'precision', 'recall', 'f1_score'. Classes that never occur get
      0.0 instead of NaN.
    """
    # Avoid the mutable-default-argument pitfall; the old code also
    # unconditionally overwrote `labels` with [0,1,2,3], which made the
    # parameter dead — it is honored now.
    if labels is None:
        labels = [0, 1, 2, 3]
    pred_y = np.asarray(pred_y)
    true_y = np.asarray(true_y)
    k = len(labels)

    total_accuracy = (pred_y == true_y).sum() / len(pred_y)
    print('total_accuracy: ', total_accuracy)

    # Confusion matrix: rows indexed by prediction, columns by truth
    # (transposed relative to the sklearn convention, as before).
    # Integer dtype so the heatmap's fmt='d' annotation works.
    c_mtx = np.zeros((k, k), dtype=np.int64)
    for p, t in zip(pred_y, true_y):
        c_mtx[p, t] += 1

    # Per-class metrics, guarding the 0/0 case (class never predicted /
    # never present) with an explicit 0.0 instead of NaN or a warning.
    diag = np.diag(c_mtx).astype(float)
    pred_totals = c_mtx.sum(axis=1)   # times each class was predicted
    true_totals = c_mtx.sum(axis=0)   # times each class truly occurred
    precision = np.divide(diag, pred_totals,
                          out=np.zeros(k), where=pred_totals > 0)
    recall = np.divide(diag, true_totals,
                       out=np.zeros(k), where=true_totals > 0)
    denom = precision + recall
    # F1 as 2PR/(P+R) is algebraically the harmonic mean used before,
    # but has no intermediate 1/0.
    f1_score = np.divide(2.0 * precision * recall, denom,
                         out=np.zeros(k), where=denom > 0)

    # Visualize the confusion matrix. The hand-built c_mtx already
    # equals sklearn's confusion_matrix(true_y, pred_y).T for these
    # labels, so no second (and formerly broken positional-`labels`)
    # sklearn call is needed.
    if show_plot:
        sns.set()
        f, ax = plt.subplots()
        sns.heatmap(c_mtx, annot=True, ax=ax, cmap='YlGnBu', fmt='d',
                    xticklabels=labels, yticklabels=labels)
        ax.set_title('Confusion matrix')
        ax.set_xlabel('True')
        ax.set_ylabel('Prediction')
        plt.show()

    # Tabulate and print the per-class metrics; also return them so
    # callers can use the numbers programmatically.
    prf = np.stack((precision, recall, f1_score)).T
    df = pd.DataFrame(prf, columns=['precision', 'recall', 'f1_score'])
    print(df)
    return df