import numpy as np
from sklearn import svm


class MulticlassSVM:
    """Multiclass linear SVM.

    Supports three strategies, chosen at construction time:
      * 'ovr'            -- one-vs-rest:  K binary LinearSVC models.
      * 'ovo'            -- one-vs-one:   K(K-1)/2 binary LinearSVC models.
      * 'crammer-singer' -- a joint weight matrix W (one row per class)
                            trained by gradient descent on the multiclass
                            hinge loss.
    """

    def __init__(self, mode):
        """Store the multiclass strategy.

        Arguments:
            mode: 'ovr', 'ovo' or 'crammer-singer'.

        Raises:
            ValueError: if mode is none of the supported strategies.
        """
        if mode not in ('ovr', 'ovo', 'crammer-singer'):
            raise ValueError('mode must be ovr or ovo or crammer-singer')
        self.mode = mode

    def fit(self, X, y):
        """Fit the model on (X, y) with the configured strategy."""
        if self.mode == 'ovr':
            self.fit_ovr(X, y)
        elif self.mode == 'ovo':
            self.fit_ovo(X, y)
        elif self.mode == 'crammer-singer':
            self.fit_cs(X, y)

    def fit_ovr(self, X, y):
        """Train one binary classifier per label (one-vs-rest)."""
        self.labels = np.unique(y)
        self.binary_svm = self.bsvm_ovr_student(X, y)

    def fit_ovo(self, X, y):
        """Train one binary classifier per label pair (one-vs-one)."""
        self.labels = np.unique(y)
        self.binary_svm = self.bsvm_ovo_student(X, y)

    def fit_cs(self, X, y, n_iter=1500, learning_rate=1e-8):
        """Train a Crammer-Singer multiclass SVM by gradient descent.

        Arguments:
            X, y: training features and labels.
            n_iter: number of gradient-descent steps (default matches the
                    original hard-coded value).
            learning_rate: gradient-descent step size.
        """
        self.labels = np.unique(y)
        # Fold the intercept into W by appending a constant-1 feature.
        X_intercept = np.hstack([X, np.ones((len(X), 1))])

        N, d = X_intercept.shape
        K = len(self.labels)

        # BUGFIX: encode labels as their positions 0..K-1 in self.labels.
        # loss_student/grad_student index rows of delta and W directly by
        # y, which is only correct for labels that already are 0..K-1;
        # encoding makes training work for arbitrary label values.
        # (np.unique returns a sorted array, so searchsorted is exact.)
        y_enc = np.searchsorted(self.labels, y)

        W = np.zeros((K, d))
        for _ in range(n_iter):
            W -= learning_rate * self.grad_student(W, X_intercept, y_enc)

        self.W = W

    def predict(self, X):
        """Predict a label for each row of X."""
        if self.mode == 'ovr':
            return self.predict_ovr(X)
        elif self.mode == 'ovo':
            return self.predict_ovo(X)
        else:
            return self.predict_cs(X)

    def predict_ovr(self, X):
        """One-vs-rest prediction: label with the highest decision score."""
        scores = self.scores_ovr_student(X)
        return self.labels[np.argmax(scores, axis=1)]

    def predict_ovo(self, X):
        """One-vs-one prediction: label with the most pairwise votes."""
        scores = self.scores_ovo_student(X)
        return self.labels[np.argmax(scores, axis=1)]

    def predict_cs(self, X):
        """Crammer-Singer prediction: label whose W row scores highest."""
        X_intercept = np.hstack([X, np.ones((len(X), 1))])
        # BUGFIX: translate the winning row index back to the actual label
        # value; the original returned raw row indices, which is only
        # correct when the labels happen to be exactly 0..K-1.
        return self.labels[np.argmax(self.W.dot(X_intercept.T), axis=0)]

    def bsvm_ovr_student(self, X, y):
        '''
        Train OVR binary classifiers.

        Arguments:
            X, y: training features and labels.

        Returns:
            binary_svm: a dictionary with labels as keys,
                        and binary SVM models as values.
        '''
        labels = np.unique(y)

        # Train K classifiers, one per label.
        binary_svm = {}
        for label in labels:
            # Relabel: current class -> 1, every other class -> 0.
            y_binary = (y == label).astype(int)

            classifier = svm.LinearSVC(random_state=12345)
            classifier.fit(X, y_binary)

            binary_svm[label] = classifier

        return binary_svm

    def bsvm_ovo_student(self, X, y):
        '''
        Train OVO binary classifiers.

        Arguments:
            X, y: training features and labels.

        Returns:
            binary_svm: a dictionary with label pairs as keys,
                        and binary SVM models as values.
        '''
        labels = np.unique(y)

        # Train K(K-1)/2 classifiers, one per unordered label pair.
        binary_svm = {}
        for i in range(len(labels) - 1):
            for j in range(i + 1, len(labels)):
                first_label = labels[i]
                second_label = labels[j]

                # Select only the samples carrying the two current labels
                # (first-label samples followed by second-label samples,
                # preserving the original training order).
                first_idx = np.where(y == first_label)[0]
                second_idx = np.where(y == second_label)[0]
                idx = np.concatenate((first_idx, second_idx))

                classifier = svm.LinearSVC(random_state=12345)
                classifier.fit(X[idx], y[idx])

                binary_svm[(first_label, second_label)] = classifier

        return binary_svm

    def scores_ovr_student(self, X):
        '''
        Compute class scores for OVR.

        Arguments:
            X: Features to predict.

        Returns:
            scores: a numpy ndarray with scores. (#samples, #labels)
        '''
        # BUGFIX: the column index is the label's position in self.labels;
        # the original indexed columns by the raw label value
        # (scores[:, label]), which breaks or silently mis-assigns for
        # labels that are not exactly 0..K-1.
        scores = np.zeros((len(X), len(self.labels)))
        for col, label in enumerate(self.labels):
            scores[:, col] = self.binary_svm[label].decision_function(X)

        return scores

    def scores_ovo_student(self, X):
        '''
        Compute class scores (pairwise vote counts) for OVO.

        Arguments:
            X: Features to predict.

        Returns:
            scores: a numpy ndarray with scores. (#samples, #labels)
        '''
        num_labels = len(self.labels)

        # Each pairwise classifier casts one vote (a label) per sample.
        # votes has shape (#pairs, #samples).
        votes = np.array([clf.predict(X) for clf in self.binary_svm.values()])

        # BUGFIX: map voted label values to their positions in self.labels
        # before counting; the original bincounted the raw labels, assuming
        # they already are 0..K-1.
        vote_idx = np.searchsorted(self.labels, votes)

        # Count the votes each label received for every sample.
        scores = np.zeros((len(X), num_labels))
        for s in range(vote_idx.shape[1]):
            scores[s] = np.bincount(vote_idx[:, s], minlength=num_labels)

        return scores

    def loss_student(self, W, X, y, C=1.0):
        '''
        Compute the Crammer-Singer loss given W, X, y:

            0.5 * ||W||^2
            + C * sum_i ( max_k [1 - delta(k, y_i) + w_k . x_i]
                          - w_{y_i} . x_i )

        Arguments:
            W: Weights. Numpy array of shape (K, d)
            X: Features. Numpy array of shape (N, d)
            y: Class indices in 0..K-1. Numpy array of shape (N,)
            C: Penalty constant. Will always be 1 in the MP.

        Returns:
            The value of the loss function given W, X and y.
        '''
        K = W.shape[0]  # Number of labels.
        N = X.shape[0]  # Number of samples.

        # delta[i, k] = 1 iff k == y_i, shape (N, K).
        delta = np.zeros((N, K))
        delta[np.arange(N), y] = 1

        # Term inside the max clause, shape (N, K).
        inner = 1 - delta + X.dot(W.T)
        # Row-wise max of inner, shape (N,).
        max_inner = np.amax(inner, axis=1)
        # w_{y_i} . x_i for each sample, shape (N,).
        product = (X * W[y]).sum(axis=1)
        # Slack (hinge) part of the loss.
        slack_loss = C * (max_inner - product).sum()

        # L2 regularization term.
        l2_loss = 0.5 * (np.linalg.norm(W) ** 2)

        return l2_loss + slack_loss

    def grad_student(self, W, X, y, C=1.0):
        '''
        Compute the gradient of the loss w.r.t. W given W, X, y.

        Arguments:
            W: Weights. Numpy array of shape (K, d)
            X: Features. Numpy array of shape (N, d)
            y: Class indices in 0..K-1. Numpy array of shape (N,)
            C: Penalty constant. Will always be 1 in the MP.

        Returns:
            The gradient of the loss function w.r.t. W,
            in a numpy array of shape (K, d).
        '''
        K = W.shape[0]  # Number of labels.
        N = X.shape[0]  # Number of samples.
        d = X.shape[1]  # Number of features.

        # delta[i, k] = 1 iff k == y_i, shape (N, K).
        delta = np.zeros((N, K))
        delta[np.arange(N), y] = 1

        # Index (0..K-1) of the maximizing class per sample, shape (N,).
        # (The original comment claimed (N, 1); argmax over axis=1 is 1-D.)
        inner = 1 - delta + X.dot(W.T)
        max_index = np.argmax(inner, axis=1)

        # Sub-gradient of the slack term: each sample pushes +x_i onto the
        # maximizing class row and -x_i onto its true class row.
        # ufunc.at accumulates correctly over repeated indices, replacing
        # the original per-sample Python loop with the same arithmetic.
        loss_grad = np.zeros((K, d))
        np.add.at(loss_grad, max_index, X)
        np.subtract.at(loss_grad, y, X)

        # d/dW of 0.5 * ||W||^2 is W itself.
        return W + C * loss_grad
