import numpy as np
from .metrics import accuracy


class LogisticRegression:
    """Binary logistic regression trained with vectorized batch gradient descent.

    Follows the scikit-learn-style estimator interface: ``fit`` / ``predict`` /
    ``score``.  Learned parameters are exposed as ``intercept_`` (bias) and
    ``k_`` (per-feature weights).
    """

    def __init__(self):
        # Learned parameters, populated by fit():
        self.intercept_ = None  # bias term, theta[0]
        self.k_ = None          # feature weights, theta[1:]
        self._theta = None      # full parameter vector [bias, weights...]

    def _sigmoid(self, t):
        """Map raw linear scores ``t`` to probabilities in (0, 1)."""
        return 1. / (1. + np.exp(-t))

    def fit(self, X_train, y_train, eta=0.01, n_iters=10000, epsilon=1e-8):
        """Fit theta by minimizing cross-entropy loss with gradient descent.

        Parameters
        ----------
        X_train : array-like, shape (n_samples, n_features)
        y_train : array-like of 0/1 labels, shape (n_samples,)
        eta : float, learning rate (default matches the original hard-coded 0.01)
        n_iters : int, maximum number of gradient steps
        epsilon : float, stop once the loss improvement drops below this

        Returns
        -------
        self : fitted estimator
        """

        def J(X_b, y, theta):
            # Cross-entropy loss.  Probabilities are clipped away from exact
            # 0/1 so np.log never yields -inf/nan.  (The original wrapped this
            # in a bare `except:` returning inf, but np.log(0) only warns and
            # returns -inf — it does not raise — so that guard never fired.)
            y_hat = np.clip(self._sigmoid(X_b.dot(theta)), 1e-15, 1 - 1e-15)
            return -np.mean(y * np.log(y_hat) + (1 - y) * np.log(1 - y_hat))

        def dJ(X_b, y, theta):
            # Gradient of J w.r.t. theta, fully vectorized.
            return X_b.T.dot(self._sigmoid(X_b.dot(theta)) - y) / len(X_b)

        def gradient_descent(X_b, y, theta):
            # Track the current loss so J is computed once per iteration
            # (the original recomputed it twice per step).
            last_loss = J(X_b, y, theta)
            for _ in range(int(n_iters)):
                theta = theta - eta * dJ(X_b, y, theta)
                loss = J(X_b, y, theta)
                if abs(loss - last_loss) < epsilon:
                    break
                last_loss = loss
            return theta

        # Prepend a column of ones so theta[0] acts as the intercept.
        X_b = np.hstack([np.ones(shape=(len(X_train), 1)), X_train])
        initial_theta = np.zeros(X_b.shape[1])
        self._theta = gradient_descent(X_b, np.asarray(y_train), initial_theta)
        self.intercept_ = self._theta[0]
        self.k_ = self._theta[1:]
        return self

    def predict_prob(self, X_test):
        """Return P(y=1 | x) for each row of ``X_test``."""
        X_b = np.hstack((np.ones(shape=(len(X_test), 1)), X_test))
        return self._sigmoid(X_b.dot(self._theta))

    def predict(self, X_test):
        """Return hard 0/1 labels using a 0.5 probability threshold."""
        return np.array(self.predict_prob(X_test) >= 0.5, dtype='int')

    def score(self, X_test, y_test):
        """Return classification accuracy on the given test set."""
        return accuracy(self.predict(X_test), y_test)

    def __repr__(self):
        return "LogisticRegression"
