import numpy as np 
from OMAI.lnnModel import LnnModel

class TrainingModel():
    """Drives gradient-descent training of an L-layer neural network.

    The numerical work (forward/backward propagation, cost, parameter
    updates) is delegated to the injected ``lnn_model`` object; this class
    owns only the training loop and a small random toy dataset.
    """

    # [input_size, num_examples, output_size] of the random toy dataset.
    dataset_dims = [8, 1, 6]

    def __init__(self, lnn_model, layer_dims=None):
        """
        Arguments:
        lnn_model -- object providing initialize_parameters_deep,
                     L_model_forward, compute_cost_softmax,
                     L_model_backward_softmax and update_parameters
        layer_dims -- optional list of layer sizes (input size first,
                      output size last); defaults to [8, 4, 4, 6]
        """
        self.lnn_model = lnn_model

        # X/Y are created per instance. As class attributes they were
        # generated once at import time, so every TrainingModel instance
        # silently shared the same "random" dataset.
        self.X = np.random.randn(self.dataset_dims[0], self.dataset_dims[1])
        self.Y = np.random.randn(self.dataset_dims[2], self.dataset_dims[1])

        if layer_dims is None:
            layer_dims = [8, 4, 4, 6]
        self.parameters = lnn_model.initialize_parameters_deep(layer_dims)

    def L_layer_model(self, learning_rate=0.0075, num_iterations=3000, print_cost=True):
        """
        Implements a L-layer neural network: [LINEAR->RELU]*(L-1)->LINEAR->SIGMOID.

        Trains on self.X / self.Y using the parameters initialized in
        __init__, running plain batch gradient descent.

        Arguments:
        learning_rate -- learning rate of the gradient descent update rule
        num_iterations -- number of iterations of the optimization loop
        print_cost -- if True, prints and records the cost every 100 steps

        Returns:
        parameters -- parameters learnt by the model. They can then be used to predict.
        """
        np.random.seed(1)
        costs = []  # cost sampled every 100 iterations (kept for inspection)

        # Loop (gradient descent)
        for i in range(num_iterations):

            # Forward propagation: [LINEAR -> RELU]*(L-1) -> LINEAR -> SIGMOID.
            AL, caches = self.lnn_model.L_model_forward(self.X, self.parameters)

            # Compute cost.
            cost = self.lnn_model.compute_cost_softmax(AL, self.Y)

            # Backward propagation.
            grads = self.lnn_model.L_model_backward_softmax(AL, self.Y, caches)

            # Update parameters.
            self.parameters = self.lnn_model.update_parameters(self.parameters, grads, learning_rate)

            # Print/record the cost every 100 iterations (the original
            # evaluated this identical condition twice in a row).
            if print_cost and i % 100 == 0:
                print("Cost after iteration %i: %f" % (i, cost))
                costs.append(cost)

        return self.parameters