import numpy as np
import matplotlib.pyplot as plt
import h5py

# Load the cat-vs-non-cat image datasets. Using `with` ensures the HDF5 file
# handles are closed as soon as the arrays are copied out (the original left
# both files open for the life of the process).
with h5py.File('logistic-regression/datasets/train_catvnoncat.h5', "r") as train_dataset:
    train_set_x_orig = np.array(train_dataset["train_set_x"][:])  # images; presumably (m_train, 64, 64, 3) given the 12288-feature model — TODO confirm
    train_set_y_orig = np.array(train_dataset["train_set_y"][:])  # labels; (m_train,)
with h5py.File('logistic-regression/datasets/test_catvnoncat.h5', "r") as test_dataset:
    test_set_x_orig = np.array(test_dataset["test_set_x"][:])
    test_set_y_orig = np.array(test_dataset["test_set_y"][:])
    classes = np.array(test_dataset["list_classes"][:])  # class-name strings

# Reshape the label vectors into (1, m) row vectors so they broadcast
# against the (1, m) prediction matrices.
train_set_y_orig = train_set_y_orig.reshape((1, train_set_y_orig.shape[0]))
test_set_y_orig = test_set_y_orig.reshape((1, test_set_y_orig.shape[0]))

# Flatten each image into one column: result is (num_features, m).
train_set_x_flatten = train_set_x_orig.reshape(train_set_x_orig.shape[0], -1).T
test_set_x_flatten = test_set_x_orig.reshape(test_set_x_orig.shape[0], -1).T

# Scale pixel intensities from [0, 255] down to [0, 1].
train_set_x = train_set_x_flatten / 255
test_set_x = test_set_x_flatten / 255

class LogisticRegression:
    """Binary logistic-regression classifier trained with batch gradient descent.

    Samples are column vectors: X has shape (num_features, m) and the
    labels Y have shape (1, m) with values in {0, 1}.
    """

    def __init__(self, num_iterations=5000, learning_rate=0.005, num_features=12288):
        # `num_features` generalizes the previously hard-coded 12288
        # (= 64 * 64 * 3 flattened image) so the model works for inputs
        # of any dimensionality; the default keeps existing callers working.
        self.w = np.zeros(shape=(num_features, 1))  # weight column vector
        self.b = 0                                  # scalar bias
        self.num_iterations = num_iterations
        self.learning_rate = learning_rate

    def train(self, X, Y):
        """Fit w and b to (X, Y) by batch gradient descent.

        Returns a tuple (params, grads, costs): the learned parameters,
        the gradients from the final iteration, and the cost sampled
        every 100 iterations.
        """
        costs = []
        # Defined up front so the return below cannot raise NameError
        # when num_iterations == 0 (a latent bug in the original).
        dw, db = np.zeros_like(self.w), 0.0
        for i in range(self.num_iterations):
            grads, cost = LogisticRegression.propagate(self.w, self.b, X, Y)
            dw = grads["dw"]
            db = grads["db"]
            # Step downhill along the gradient.
            self.w = self.w - self.learning_rate * dw
            self.b = self.b - self.learning_rate * db

            if i % 100 == 0:
                costs.append(cost)
                print("迭代的次数: %i ， 误差值： %f" % (i, cost))

        params = {
            "w": self.w,
            "b": self.b}
        grads = {
            "dw": dw,
            "db": db}
        return (params, grads, costs)

    @staticmethod
    def propagate(w, b, X, Y):
        """One forward/backward pass: return (grads, cost) for parameters w, b."""
        m = X.shape[1]
        # Forward propagation: predicted probability for every sample.
        A = LogisticRegression.sigmoid(np.dot(w.T, X) + b)
        # Cross-entropy cost averaged over the m samples.
        cost = (- 1 / m) * np.sum(Y * np.log(A) + (1 - Y) * (np.log(1 - A)))

        # Backward propagation: gradients of the cost w.r.t. w and b.
        dw = (1 / m) * np.dot(X, (A - Y).T)
        db = (1 / m) * np.sum(A - Y)
        cost = np.squeeze(cost)  # collapse the 0-d result to a plain scalar

        grads = {
            "dw": dw,
            "db": db
        }
        return (grads, cost)

    @staticmethod
    def sigmoid(z):
        """Element-wise logistic function 1 / (1 + e^(-z))."""
        return 1 / (1 + np.exp(-z))

    def predict(self, X):
        """Return hard 0/1 predictions of shape (1, m) for the columns of X."""
        w = self.w.reshape(X.shape[0], 1)  # defensive reshape; a no-op when shapes already agree

        A = LogisticRegression.sigmoid(np.dot(w.T, X) + self.b)
        # Threshold the probabilities at 0.5, vectorized instead of the
        # original per-column Python loop (same values, same float dtype).
        return (A > 0.5).astype(float)

    def test(self, X, Y):
        """Print the model's accuracy (%) on the labeled set (X, Y)."""
        Y_prediction = self.predict(X)
        # NOTE(review): the label below says 训练集 ("training set") even when
        # this is called on test data — consider parameterizing the message.
        print("训练集准确性："  , format(100 - np.mean(np.abs(Y_prediction - Y)) * 100) ,"%")

# Build the model, fit it on the training images, then report accuracy on
# both the held-out test set and the training set itself.
model = LogisticRegression()
model.train(train_set_x, train_set_y_orig)
model.test(test_set_x, test_set_y_orig)
model.test(train_set_x, train_set_y_orig)