/**
 * Copyright 2010 Neuroph Project http://neuroph.sourceforge.net
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package main;

import java.io.File;
import java.util.Arrays;


import neuralnetwork.MultiLayerPerceptron;
import neuralnetwork.SmoothingAlgorithm;
import org.neuroph.core.NeuralNetwork;
import org.neuroph.core.learning.SupervisedTrainingElement;
import org.neuroph.core.learning.TrainingSet;
import org.neuroph.util.TransferFunctionType;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

/**
 * This sample shows how to create, train, save and load simple Multi Layer Perceptron for the XOR problem.
 * This sample shows basics of Neuroph API.
 * @author Zoran Sevarac <sevarac@gmail.com>
 */
public class XorMultiLayerPerceptronSample {

	/** Class-wide logger; its output file is set up in {@link #main} via log4j. */
	static final Logger logger = Logger.getLogger(XorMultiLayerPerceptronSample.class);

	/** Log file produced by the log4j configuration loaded in {@link #main}. */
	private static final String LOG_FILE = "Log/neuralnetworks.log";

    /**
     * Runs this sample: archives the previous log file, configures logging,
     * builds a (partial) XOR training set, trains a multi layer perceptron
     * on it and logs the network output for every training element.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        // Create the training set (2 inputs, 1 output).
        // NOTE(review): only two patterns are used; the full XOR set would
        // also contain {0,1}->1 and {1,0}->1. The original sample carried
        // several additional patterns commented out (with non-XOR targets),
        // so the truncation looks like deliberate experiment state — confirm.
        TrainingSet<SupervisedTrainingElement> trainingSet =
                new TrainingSet<SupervisedTrainingElement>(2, 1);
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{1, 1}, new double[]{1}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{0, 0}, new double[]{0}));

        // Move last run's log aside BEFORE configuring log4j, so the
        // configuration below starts writing into a fresh log file.
        boolean logRotated = rotateLogFile();

        PropertyConfigurator.configure("Logging Properties/log4j.properties");
        logger.setLevel(Level.FATAL);

        // Report the rotation failure only now that logging is configured.
        if (!logRotated) {
            logger.fatal("Log file could not be renamed");
        }

        // Create the multi layer perceptron.
        // NOTE(review): topology 2-1 has no hidden layer and therefore cannot
        // represent full XOR; confirm this matches the experiment's intent.
        MultiLayerPerceptron myMlPerceptron =
                new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 1);

        // Use the custom smoothing learning rule in on-line (non-batch) mode.
        myMlPerceptron.setLearningRule(new SmoothingAlgorithm());
        ((SmoothingAlgorithm) myMlPerceptron.getLearningRule()).setBatchMode(false);

        // Learn the training set.
        logger.fatal("Training the neural network");
        myMlPerceptron.learn(trainingSet);

        // Test the trained network on the same training set.
        logger.fatal("Testing trained neural network");
        testNeuralNetwork(myMlPerceptron, trainingSet);
    }

    /**
     * Renames the previous log file (if any) to a timestamped archive name,
     * e.g. {@code Log/neuralnetworks_1234567890.log}, so the next log4j
     * configuration starts with an empty {@value #LOG_FILE}.
     *
     * @return {@code true} if there was no previous log file or it was renamed
     *         successfully; {@code false} if the rename failed
     */
    private static boolean rotateLogFile() {
        File currentLog = new File(LOG_FILE);
        if (!currentLog.exists()) {
            // Nothing to archive; not a failure.
            return true;
        }
        File archivedLog =
                new File("Log/neuralnetworks_" + System.currentTimeMillis() + ".log");
        return currentLog.renameTo(archivedLog);
    }

    /**
     * Logs the network output for each element from the specified training set.
     *
     * @param neuralNet trained neural network to evaluate
     * @param trainingSet training set whose inputs are fed to the network
     */
    public static void testNeuralNetwork(NeuralNetwork neuralNet,
            TrainingSet<SupervisedTrainingElement> trainingSet) {

        for (SupervisedTrainingElement trainingElement : trainingSet.elements()) {
            neuralNet.setInput(trainingElement.getInput());
            neuralNet.calculate();
            double[] networkOutput = neuralNet.getOutput();

            logger.fatal("Input: " + Arrays.toString(trainingElement.getInput()));
            logger.fatal(" Output: " + Arrays.toString(networkOutput));
        }
    }

}
