#!/usr/bin/env python3

#*
from utils import load_data, sigmoid
import numpy as np

#* load data
# load_data comes from the project-local utils module; based on usage below it
# returns image-like arrays plus label arrays and class names.
# NOTE(review): exact shapes/dtypes are not visible here -- TODO confirm that
# train_Y/test_Y broadcast against the (n_samples, 1) predictions in propagate.
train_X_orig, train_Y, test_X_orig, test_Y, classes = load_data()

#* preprocessing
# Flatten each sample into a row vector -> shape (n_samples, n_features),
# and scale pixel intensities from [0, 255] down to [0, 1].
train_X = train_X_orig.reshape(train_X_orig.shape[0], -1)/255.
test_X = test_X_orig.reshape(test_X_orig.shape[0], -1)/255.

#* model
def initialize_with_zeros(dim):
  """Return zero-initialized parameters for logistic regression.

  Args:
    dim: number of input features.

  Returns:
    (w, b): a (dim, 1) array of zeros and a scalar bias of 0.0.
  """
  weights = np.zeros(shape=(dim, 1))
  bias = 0.0
  return weights, bias

def propagate(w, b, X, Y):
  """Forward/backward pass: binary cross-entropy cost and its gradients.

  Args:
    w: weight column vector, shape (n_features, 1).
    b: scalar bias.
    X: data matrix with samples as rows, shape (n_samples, n_features)
       (matches the reshape done in preprocessing above).
    Y: ground-truth labels in {0, 1}; must broadcast against the
       (n_samples, 1) prediction vector.

  Returns:
    (grads, cost): grads is {'dw': (n_features, 1), 'db': scalar};
    cost is the scalar mean cross-entropy over the samples.
  """
  m = X.shape[0]

  Z = X@w + b
  P = sigmoid(Z)
  # Clip the probabilities used inside the logs so that a saturated sigmoid
  # (P exactly 0.0 or 1.0 in floating point) cannot produce log(0) = -inf
  # and turn the cost into nan. The gradients below still use the raw P,
  # so the parameter updates are unchanged.
  P_safe = np.clip(P, 1e-15, 1 - 1e-15)
  cost = np.squeeze(-np.mean(Y*np.log(P_safe) + (1 - Y)*np.log(1 - P_safe), axis=0))
  dw = X.T@(P - Y)/m
  db = np.sum(P - Y)/m

  grads = {
    "dw": dw,
    "db": db,
  }
  return grads, cost

def optimize(w, b, X, Y, n_iters, learning_rate, print_cost=False):
  """Run plain gradient descent for n_iters steps.

  w is a numpy array and is updated in place via ``-=`` (the returned
  'w' is the same object the caller passed in). The cost is recorded
  every 100 iterations, and printed as well when print_cost is True.

  Returns:
    (params, grads, costs): trained {'w', 'b'}, the gradients from the
    final propagate call, and the list of sampled costs.
  """
  costs = []
  for step in range(n_iters):
    grads, cost = propagate(w, b, X, Y)
    # Step against the gradient direction.
    w -= learning_rate * grads["dw"]
    b -= learning_rate * grads["db"]
    sample_now = step % 100 == 0
    if sample_now:
      costs.append(cost)
      if print_cost:
        print(f'iteration {step}: cost {cost}')
  return {'w': w, 'b': b}, grads, costs

def predict(w, b, X):
  """Classify each row of X: True where sigmoid(Xw + b) exceeds 0.5.

  Returns a boolean array with the same leading dimension as X.
  """
  probabilities = sigmoid(X @ w + b)
  return probabilities > 0.5

def model(X_train, Y_train, X_test, Y_test, n_iters=2000, learning_rate=0.5, print_cost=False):
  """Train a logistic-regression classifier and report split accuracies.

  Args:
    X_train, X_test: data matrices with samples as rows.
    Y_train, Y_test: 0/1 labels for the corresponding splits.
    n_iters: number of gradient-descent iterations.
    learning_rate: step size for the updates.
    print_cost: forwarded to optimize; prints cost every 100 iterations.

  Returns:
    dict with the cost history, predictions on both splits, the learned
    parameters, and the hyperparameters used.
  """
  w, b = initialize_with_zeros(X_train.shape[1])
  params, grads, costs = optimize(w, b, X_train, Y_train, n_iters, learning_rate, print_cost)
  w, b = params['w'], params['b']

  Y_pred_train = predict(w, b, X_train)
  Y_pred_test = predict(w, b, X_test)

  # Boolean predictions compare cleanly against 0/1 labels (True == 1).
  print(f'train accuracy: {np.mean(Y_pred_train == Y_train)}')
  print(f'test accuracy: {np.mean(Y_pred_test == Y_test)}')

  result = {
    'costs': costs,
    'Y_pred_train': Y_pred_train,
    'Y_pred_test': Y_pred_test,
    'w': w,
    'b': b,
    'learning_rate': learning_rate,
    'n_iters': n_iters,
  }
  return result

#* train and evaluate; learning_rate=0.005 overrides the default of 0.5
d = model(train_X, train_Y, test_X, test_Y, learning_rate=0.005, print_cost=True)
