# Neural Network of Beer Revision 2

import csv
import itertools
import math
import random
import sys

from getdata import *  # updated from revision 1

ETA = 1

class Perceptron:
    '''This class is a perceptron'''
    def __init__(self, inputs, weights=None, bias=None):
        '''inputs is a list of inputs, weights is a list of weights corresponding to the inputs'''
        self.inputs = [];

        # The constructor can take Perceptrons as inputs, but the math can't.  This disambiguates them.
        for i in range(len(inputs)):
            if type(inputs[i]) is Perceptron:
                self.inputs.append(inputs[i].output);
            else:
                self.inputs.append(inputs[i]);

        # Bias is an optional parameter.  Unfortunately, Python's concept of an optional paramter is a little wiggy.
        if bias == None:
            self.bias = 0;
        else:
            self.bias = bias;

        # If weights weren't provided, randomly generate them between -1 and 1.
        if weights == None:
            self.weights = [];
            for i in range(len(inputs)):
                self.weights.append(5 * random.random() - 2.5);
        else:
            self.weights = weights


        self.output = DataNode();

    def activity_function(self):
        '''computes the activity function'''
        activity = 0
        for i in range(0, len(self.inputs)):
            activity += self.inputs[i].getValue() * self.weights[i]
        activity += self.bias
        return activity

    def activation_function(self, y):
        '''sigmoid on activity function'''
        activation = 1 / (1 + math.exp(-y))
        return activation

    # Calculate the output value.
    def compute(self):
        '''this checks that we have the correct number of weights and inputs, then computes the output of the perceptron'''
        if len(self.inputs) != len(self.weights):
            print("Error, different number of inputs and weights")
            print("weights: ", len(self.weights))
            print("inputs:", len(self.inputs))
            exit(1)
        else:
            activity = self.activity_function()
            activation = self.activation_function(activity)

        self.output.setValue(activation);
        return activation

    # Training the node using the provided delta function.
    def update_weights(self, delta):
        for i in range(0, len(self.weights)):
            self.weights[i] = self.weights[i] + ETA * delta * self.inputs[i].getValue();
        return

    # Since the output is stored on an object, this is just a convenience function to reduce typing.
    def getOutput(self):
        return self.output.getValue();

    # prettier printing.
    def __str__(self):
        outString = "";
        for x in range(len(self.weights)):
            outString += "W" + str(x) + "=" + str(self.weights[x]) + " ";
        outString += "Bias=" + str(self.bias);
        return outString;

'''This type stores our data.  The reference is stored at the input node, that way we don't have to extract the output of a 
perceptron and apply it to the input of the next layer.  The next layer is already aware of this node, and extracts it as part of 
the calculation.'''
class DataNode:
    def __init__(self):
        self.value = 0;

    def setValue(self, value):
        self.value = value;

    def getValue(self):
        return self.value;

    def __str__(self):
        return str(self.value);

class NeuralNetwork:
    def __init__(self):
        self.network = [];

    def addPerceptron(self, perceptron, layer):
        while len(self.network) < layer:
            self.network.append([]);
        self.network[layer - 1].append(perceptron);

    def applyInputVector(self, vector):
        # Loop over all nodes in the network.
        for i in range(len(self.network)):
            for j in range(len(self.network[i])):
                self.network[i][j].compute();  # Compute this nodes output

    def updateWeights(self, deltaValue):

        # Loop over all nodes in the network.
        for i in range(len(self.network)):
            for j in range(len(self.network[i])):
                self.network[i][j].update_weights(deltaValue);  # Compute this nodes output

    def printNet(self):
        print("Neural Network")
        for i in range(len(self.network)):
            print("Layer " + str(i));
            for j in range(len(self.network[i])):
                print(self.network[i][j]);

'''given an expected value and actual value, calculate and append the delta, error and append to the errors array'''
def calculate_output_delta(expected, actual):
    error = expected - actual
    delta = error * (1 - actual) * actual
    return delta

'''This is a really bad vehicle for doing this.  But...whatever.'''
def buildInputArray(nameArray, inputMap):
    arr = [];
    for x in nameArray:
        arr.append(inputMap[x]);
    return arr;

'''Factory method for Perceptrons using a list of strings and a map of strings-to-DataNode'''
def createPerceptronWithInputs(inputs, inputsMap, bias=0):
    perceptronIns = [];
    for x in inputs:
        if x in inputsMap:
            perceptronIns.append(inputsMap[x]);
        else:
            d = DataNode();
            inputsMap[x] = d;
            perceptronIns.append(d);
    return Perceptron(perceptronIns);


'''System takes 4 inputs TrainingCSV, Columns, TestCSV, Columns  '''
def main():
# train the network
    beer = data(sys.argv[1])  # updated from revision 1...input 1 is csv file input 2 is number of columns including the rating column
    beer_data = beer.create()

    print("Read these headers from CSV " + str(beer_data.headers));

    inputMap = {};

    originInputs = ["Belgian And French Origin Ales", "British Origin Ales", "European-germanic Lager", "German Origin Ales", "Hybrid/mixed Beer", "International Ale Styles", "International Styles", "Irish Origin Ales", "Mead", "North American Lager", "North American Origin Ales", "Other Lager", "Other Origin"];

    # First order inputs
    originPerceptron = createPerceptronWithInputs(originInputs, inputMap);
    abvPerceptron = createPerceptronWithInputs(["abv"], inputMap);
    ibuPerceptron = createPerceptronWithInputs(["ibu"], inputMap);

    # An attempt to correlate combinations of 2 inputs.
    abvIbu = Perceptron([abvPerceptron, ibuPerceptron]);
    abvOrigin = Perceptron([abvPerceptron, originPerceptron]);
    ibuOrigin = Perceptron([originPerceptron, ibuPerceptron]);

    #output
    outputNode = Perceptron([abvIbu, abvOrigin , ibuOrigin]);

    
    net = NeuralNetwork();
    net.addPerceptron(ibuPerceptron, 1);
    net.addPerceptron(abvPerceptron, 1);
    net.addPerceptron(originPerceptron, 1);
    net.addPerceptron(abvIbu, 2);
    net.addPerceptron(abvOrigin, 2);
    net.addPerceptron(ibuOrigin, 2);
    net.addPerceptron(outputNode, 3);

    print("PRE-TRAINING NETWORK");
    net.printNet();

    n = 0
    num_iterations = 1200
    while n < num_iterations:
        value = 0
        while value < len(beer_data):
            try:
                expected = beer_data.getRating(value) / 5
            except IndexError:
                print("");
            beer_data.fillDataForIndex(value, inputMap);
            net.applyInputVector(inputMap);

            delta_o = calculate_output_delta(expected, outputNode.getOutput());
            net.updateWeights(delta_o)
            value += 1
        n += 1


    # test
    beer = data(sys.argv[2])  # updated from revision 1...input 1 is csv file input 2 is number of columns including the rating column
    testData = beer.create()
    value = 0

    # Create a CSV output stream.
    outfile = open("test_values.csv", 'w')
    test_file = csv.writer(outfile, lineterminator='\n', delimiter=',')

    # Write the headers to file.
    headers = ["predicted", "expected", "Error"];
    test_file.writerow(headers);


    meanSquaredError = 0;
    comparisonMSE = 0;
    dataLength = len(testData);  # Storage to prevent recalculation.
    # Loop over the accumulated data


    print("POST TRAINING NETWORK");
    net.printNet();

    while value < dataLength:
        testData.fillDataForIndex(value, inputMap);  # Predict the value
        net.applyInputVector(inputMap);

        predictedRating = outputNode.getOutput() * 5;  # Predicted rating is *5
        knownRating = testData.getRating(value);
        error = predictedRating - knownRating;
        completelyRandomPrediction = 5 * random.random();

        pad = '{0:.2f}';
        beautifiedTuple = (pad.format(predictedRating), pad.format(knownRating), pad.format(error));
        test_file.writerow(beautifiedTuple);  # Append the row to file.

        meanSquaredError += 1 / dataLength * error ** 2;  # Summing up for the MSE
        comparisonMSE += 1 / dataLength * (completelyRandomPrediction - knownRating) ** 2;  # Testing for significant improvement over random prediction.

        value += 1  # Loop Counter

    print("Mean Squared Error: " + str(meanSquaredError));
    print("Mean Squared Error of a random prediction: " + str(comparisonMSE));


if __name__ == '__main__':
    main()

