/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package org.neuralstudio.utils;

import java.util.ArrayList;
import java.util.List;
import org.jane.core.Neuron;
import org.jane.core.TransferFunctions;
import org.jane.core.functions.HardLimitFunction;
import org.jane.core.functions.ITransferFunction;
import org.jane.core.functions.LogSigmoidFunction;
import org.jane.core.functions.PosLinFunction;
import org.jane.core.functions.PureLinFunction;
import org.jane.core.functions.TanhFunction;
import org.jane.core.layers.AbstractNeuronLayer;
import org.jane.core.layers.SimpleLayer;
import org.jane.core.networks.AbstractNeuralNetwork;
import org.jane.core.networks.Adaline;
import org.jane.core.networks.CompetitiveNetwork;
import org.jane.core.networks.MultiLayerPerceptron;
import org.jane.core.networks.Perceptron;
import org.jane.core.trainers.BackpropagationTrainer;
import org.jane.core.trainers.DeltaRuleTrainer;
import org.jane.core.trainers.PerceptronTrainer;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;

/**
 *
 * @author william
 */
public class NeuralSaxHandler extends DefaultHandler {

    // Network type codes derived from the "type" attribute of <network>.
    private static final int TYPE_PERCEPTRON = 1;
    private static final int TYPE_ADALINE = 2;
    private static final int TYPE_MULTILAYER_PERCEPTRON = 3;
    private static final int TYPE_COMPETITIVE = 4;

    /** Which network to build in {@link #createNetwork()}; 0 if the type attribute was unknown. */
    private int networkType;
    /** One inner list of neurons per parsed layer element, in document order. */
    private List<List<Neuron>> netStructure;
    /** Neurons of the layer element currently being parsed. */
    private List<Neuron> layer;
    /** Weights of the neuron element currently being parsed. */
    private List<Float> weights;
    /** Transfer function chosen by the enclosing layer's "type" attribute. */
    private ITransferFunction transferFunction;
    /** The neuron element currently being parsed. */
    private Neuron neuron;
    /** True while inside a weight element, so characters() knows to buffer text. */
    private boolean isWeight;
    /**
     * Accumulates the text content of the current weight element. SAX parsers are
     * free to deliver one text node in several characters() callbacks, so the text
     * must be buffered here and parsed only when the closing tag is seen.
     */
    private final StringBuilder weightText = new StringBuilder();
    /** Learning rate from the "learningrate" attribute of the network element. */
    private float learningRate;
    /** The finished network, available after parsing via {@link #getNetwork()}. */
    private AbstractNeuralNetwork network;

    /*
     * Expected document shape (attribute and type names as actually read below):
     *
     *  <network type="perceptron|adaline|mperceptron|competitive" learningrate="0.1">
     *      <layer type="logsigmoid|hardlimit|poslin|purelin|tanh">
     *          <neuron sensitivity="0.0" bias="0.0">
     *              <weight>0.5</weight>
     *              <weight>-0.5</weight>
     *          </neuron>
     *      </layer>
     *      ...
     *  </network>
     */
    @Override
    public void startElement(String uri, String localName, String qName, Attributes attrs) {
        if ("network".equals(qName)) {
            // NOTE(review): Float.parseFloat throws NumberFormatException if the
            // attribute is absent (getValue returns null); the document is assumed
            // to always carry "learningrate" — confirm against the writer side.
            this.learningRate = Float.parseFloat(attrs.getValue("learningrate"));
            this.netStructure = new ArrayList<List<Neuron>>();
            String type = attrs.getValue("type");
            if ("perceptron".equals(type)) {
                this.networkType = TYPE_PERCEPTRON;
            } else if ("adaline".equals(type)) {
                this.networkType = TYPE_ADALINE;
            } else if ("mperceptron".equals(type)) {
                this.networkType = TYPE_MULTILAYER_PERCEPTRON;
            } else if ("competitive".equals(type)) {
                this.networkType = TYPE_COMPETITIVE;
            }
        } else if ("layer".equals(qName)) {
            String layerType = attrs.getValue("type");
            // An unrecognized layer type leaves the previously selected function in
            // place — same behavior as before this rewrite.
            if ("logsigmoid".equals(layerType)) {
                this.transferFunction = new LogSigmoidFunction();
            } else if ("hardlimit".equals(layerType)) {
                this.transferFunction = new HardLimitFunction();
            } else if ("poslin".equals(layerType)) {
                this.transferFunction = new PosLinFunction();
            } else if ("purelin".equals(layerType)) {
                this.transferFunction = new PureLinFunction();
            } else if ("tanh".equals(layerType)) {
                this.transferFunction = new TanhFunction();
            }
            this.layer = new ArrayList<Neuron>();
        } else if ("neuron".equals(qName)) {
            this.neuron = new Neuron(this.transferFunction);
            this.neuron.setBias(Float.parseFloat(attrs.getValue("bias")));
            this.neuron.setSensitivity(Float.parseFloat(attrs.getValue("sensitivity")));
            this.weights = new ArrayList<Float>();
        } else if ("weight".equals(qName)) {
            this.isWeight = true;
            this.weightText.setLength(0); // fresh buffer for this element's text
        }
    }

    @Override
    public void endElement(String uri, String localName, String qName) throws SAXException {
        if ("weight".equals(qName)) {
            this.isWeight = false;
            // Parse only now: the element's text may have arrived in several
            // characters() chunks, all of which were appended to weightText.
            this.weights.add(Float.parseFloat(this.weightText.toString().trim()));
        } else if ("neuron".equals(qName)) {
            this.neuron.setWeights(this.weights);
            this.layer.add(this.neuron);
        } else if ("layer".equals(qName)) {
            this.netStructure.add(this.layer);
        } else if ("network".equals(qName)) {
            this.createNetwork();
        }
    }

    /**
     * Buffers text that arrives inside a weight element. The SAX contract allows a
     * parser to split one text node across multiple callbacks — the previous
     * version parsed each chunk as a complete float, which broke on split input —
     * so this method only appends; parsing happens in {@link #endElement}.
     */
    @Override
    public void characters(char[] ch, int start, int length) throws SAXException {
        if (this.isWeight) {
            this.weightText.append(ch, start, length);
        }
    }

    /** Assembles {@link #network} from the parsed structure, dispatching on the network type. */
    private void createNetwork() {
        switch (this.networkType) {
            case TYPE_PERCEPTRON:
                this.network = buildPerceptron();
                break;
            case TYPE_ADALINE:
                this.network = buildAdaline();
                break;
            case TYPE_MULTILAYER_PERCEPTRON:
                this.network = buildMultiLayerPerceptron();
                break;
            case TYPE_COMPETITIVE:
                this.network = buildCompetitive();
                break;
            default:
                this.network = null; // unknown or missing type attribute
                break;
        }
    }

    /** Copies every neuron of {@code neurons} into {@code target} and returns it. */
    private static SimpleLayer fill(SimpleLayer target, List<Neuron> neurons) {
        for (Neuron n : neurons) {
            target.addNeuron(n);
        }
        return target;
    }

    /** Single-layer perceptron: hard-limit transfer, perceptron learning rule. */
    private AbstractNeuralNetwork buildPerceptron() {
        List<Neuron> neurons = this.netStructure.get(0);
        Perceptron net = new Perceptron(new PerceptronTrainer(), neurons.size());
        net.setNeurons(fill(new SimpleLayer(neurons.size(), TransferFunctions.HARDLIMIT), neurons));
        net.setLearningRate(this.learningRate);
        return net;
    }

    /** Adaline: pure-linear transfer, delta rule. */
    private AbstractNeuralNetwork buildAdaline() {
        List<Neuron> neurons = this.netStructure.get(0);
        Adaline net = new Adaline(new DeltaRuleTrainer(), neurons.size());
        net.setNeurons(fill(new SimpleLayer(neurons.size(), TransferFunctions.PURELIN), neurons));
        net.setLearningRate(this.learningRate);
        return net;
    }

    /** Multi-layer perceptron trained by backpropagation; one SimpleLayer per parsed layer. */
    private AbstractNeuralNetwork buildMultiLayerPerceptron() {
        MultiLayerPerceptron net =
                new MultiLayerPerceptron(new BackpropagationTrainer(), this.learningRate);
        List<SimpleLayer> layers = new ArrayList<SimpleLayer>(this.netStructure.size());
        for (List<Neuron> ln : this.netStructure) {
            // Recover the layer's transfer-function constant from the first
            // neuron's function object; anything unrecognized falls back to
            // PURELIN (same fallback as the original nested ternary).
            ITransferFunction tf = ln.get(0).getTransferFunctionObject();
            SimpleLayer sl;
            if (tf instanceof LogSigmoidFunction) {
                sl = new SimpleLayer(ln.size(), TransferFunctions.LOGSIGMOID);
            } else if (tf instanceof TanhFunction) {
                sl = new SimpleLayer(ln.size(), TransferFunctions.TANH);
            } else {
                sl = new SimpleLayer(ln.size(), TransferFunctions.PURELIN);
            }
            int j = 0;
            for (Neuron n : ln) {
                sl.setNeuron(j, n);
                j++;
            }
            layers.add(sl);
        }
        net.setNeurons(layers);
        return net;
    }

    /**
     * Competitive network: exactly two layers are expected — a PURELIN layer at
     * index 0 and a POSLIN layer at index 1. Fewer layers would throw
     * IndexOutOfBoundsException, exactly as the original code did.
     */
    private AbstractNeuralNetwork buildCompetitive() {
        List<Neuron> first = this.netStructure.get(0);
        List<Neuron> second = this.netStructure.get(1);
        CompetitiveNetwork net = new CompetitiveNetwork(first.size(), this.learningRate);
        List<AbstractNeuronLayer> layers = new ArrayList<AbstractNeuronLayer>(2);
        layers.add(fill(new SimpleLayer(first.size(), TransferFunctions.PURELIN), first));
        layers.add(fill(new SimpleLayer(second.size(), TransferFunctions.POSLIN), second));
        net.setNeurons(layers);
        return net;
    }

    /** @return the network built by the last completed parse, or null if none was built */
    public AbstractNeuralNetwork getNetwork() {
        return network;
    }

    /** @param network replaces the currently held network */
    public void setNetwork(AbstractNeuralNetwork network) {
        this.network = network;
    }

}
