from BaumWelch import *


def viterbi(mean, var, aij, obs):
    """
    Viterbi decoding: log-likelihood of the best state path through a
    left-to-right HMM with Gaussian emissions.

    Parameters
    ----------
    mean : ndarray, shape (dim, n_states)
        Per-state emission means (without entry/exit padding).
    var : ndarray, shape (dim, n_states)
        Per-state emission variances.
    aij : ndarray, shape (n_states + 2, n_states + 2)
        Transition matrix including the non-emitting entry (index 0) and
        exit (index -1) states.  A local copy is taken, so the caller's
        array is never mutated.
    obs : ndarray, shape (dim, t_len)
        Observation sequence, one column per frame.

    Returns
    -------
    float
        Log-probability of the most likely complete state sequence, or
        -inf if no complete path exists.
    """
    dim, t_len = obs.shape

    # Pad mean/var with NaN columns so emitting-state indices line up
    # with aij's entry (0) and exit (m_len - 1) rows/columns.
    pad = np.full([dim, 1], np.nan)
    mean = np.concatenate((pad, mean, pad), axis=1)
    var = np.concatenate((pad, var, pad), axis=1)

    # Work on a copy: the original code mutated the caller's array here
    # (aij is a view of Aij[:, :, p] in HMM_test).
    aij = np.array(aij, copy=True)
    aij[-1][-1] = 1  # exit-state self-loop; log(1) = 0

    m_len = mean.shape[1]

    # fjt[j, t]: best log-score of any path ending in state j at frame t.
    fjt = np.full([m_len, t_len], -np.inf)

    # s_chain[j, t]: the state sequence realizing fjt[j, t].
    s_chain = np.empty((m_len, t_len), dtype=object)
    for i in range(m_len):
        for j in range(t_len):
            s_chain[i][j] = []

    # Initialization: entry state -> emitting state j at frame 0.
    for j in range(1, m_len - 1):
        fjt[j, 0] = np.log(aij[0, j]) + logGaussian(mean[:, j], var[:, j], obs[:, 0])
        if fjt[j, 0] > -np.inf:
            s_chain[j, 0] = [0, j]

    # Recursion.  Left-to-right topology: predecessors of j are 1..j.
    for t in range(1, t_len):
        for j in range(1, m_len - 1):
            # Emission score does not depend on the predecessor i: hoist it.
            log_emit = logGaussian(mean[:, j], var[:, j], obs[:, t])
            f_max = -np.inf
            i_max = -1
            for i in range(1, j + 1):
                # BUG FIX: the comparison must stay inside this guard.
                # Previously `f` was only recomputed for reachable
                # predecessors, but compared unconditionally, so a stale
                # score from an earlier i could be reused.
                if fjt[i, t - 1] > -np.inf:
                    f = fjt[i, t - 1] + np.log(aij[i, j]) + log_emit
                    if f > f_max:
                        f_max = f
                        i_max = i

            if i_max != -1:
                s_chain[j, t] = s_chain[i_max, t - 1] + [j]
                fjt[j, t] = f_max

    # Termination: add the transition from each emitting state into the
    # exit state and keep the best total score.
    fopt = -np.inf
    for i in range(1, m_len - 1):
        f = fjt[i, t_len - 1] + np.log(aij[i, m_len - 1])
        if f > fopt:
            fopt = f

    # NOTE: the optimal chain itself (s_chain[iopt, -1] + [m_len - 1]) was
    # computed by the original code but never returned; callers only use
    # the score, so the dead bookkeeping is dropped.
    return fopt


def HMM_test(mean, var, Aij, testing_data):
    """
    Evaluate digit-HMM recognition accuracy on a test set.

    Each utterance is scored against every digit model with Viterbi
    decoding; the highest-scoring model is the recognized digit.

    Parameters
    ----------
    mean, var : ndarray, shape (dim, n_states, n_models)
        Gaussian emission parameters, one slice per digit model.
    Aij : ndarray, shape (n_states + 2, n_states + 2, n_models)
        Transition matrices, one slice per digit model.
    testing_data : iterable of dict
        Each item carries "modelID" ("Z", "O", or a digit string) and
        "features" (the observation matrix passed to viterbi).

    Returns
    -------
    float
        Recognition accuracy in percent.
    """
    # Model count comes from the parameter arrays instead of a hard-coded
    # 11 (digits 0-9 plus "O"), so the function generalizes to other
    # model sets while behaving identically for the original 11.
    num_of_model = Aij.shape[2]
    num_of_error = 0
    num_of_testing = 0

    for features in testing_data:
        # Map the spoken-digit label to a model index:
        # "Z"(ero) -> 0, "O"(h) -> 10, otherwise the digit itself.
        k = features["modelID"]
        if k == "Z":
            k = 0
        elif k == "O":
            k = 10
        else:
            k = int(k)
        obs = features["features"]
        num_of_testing += 1

        # Pick the model with the highest Viterbi log-likelihood.
        fopt_max = -np.inf
        digit = -1
        for p in range(num_of_model):
            fopt = viterbi(mean[:, :, p], var[:, :, p], Aij[:, :, p], obs)
            if fopt > fopt_max:
                digit = p
                fopt_max = fopt

        if digit != k:
            num_of_error += 1
            print(str(num_of_testing) + '###error###')
        else:
            print(str(num_of_testing) + '###correct###')

    accuracy_rate = (num_of_testing - num_of_error) * 100 / num_of_testing
    return accuracy_rate
