package behavior;

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Properties;

import manager.Manager;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;

public abstract class BehaviorCalculator 
{
    /** Behavior (neural-net wrapper) of each agent, indexed by agent. */
    Behavior[] behaviorList;
    /** Scales how extreme mutations are when breeding two behaviors. */
    private double mutationFactor;
    /** Reference back to the manager; used here for error reporting. */
    Manager mgr;
    
    /** Runs one round of agent actions; implementation is subclass-specific. */
    public abstract void procActions();
    
    /**
     * Appends the passed behavior to the end of behaviorList.
     *
     * @param noob behavior to append
     */
    public void addBehavior(Behavior noob)
    {
        //Arrays.copyOf replaces the previous hand-rolled element copy.
        Behavior[] newList = Arrays.copyOf(behaviorList, behaviorList.length + 1);
        newList[newList.length - 1] = noob;
        behaviorList = newList;
    }

    /**
     * Loads a saved simulation from a properties file and builds the matching
     * calculator. Each saved behavior ("beha&lt;i&gt;...") entry is translated
     * into a Behavior instance. If the file's sim type is "local", a
     * LocalBehaviorCalculator preloaded with those behaviors is returned; the
     * "distributed" case is not implemented yet. On any load or parse failure
     * the error is reported through the manager and an empty local calculator
     * is returned.
     *
     * @param mgr    manager to hand to the new calculator and report errors to
     * @param mutFac mutation factor for the new calculator
     * @param file   path of the properties file to load
     * @return the populated calculator, or an empty local one on failure
     */
    public static BehaviorCalculator factory(Manager mgr, double mutFac, String file)
    {
        Properties props = new Properties();
        //Default result: a local calculator with no behaviors loaded.
        BehaviorCalculator bc = new LocalBehaviorCalculator(mgr, mutFac);
        try
        {
            FileInputStream fis = new FileInputStream(file);
            try
            {
                props.load(fis);
            }
            finally
            {
                //Close in finally so a failed load() no longer leaks the stream.
                fis.close();
            }
            int numAnimals = Integer.parseInt(props.getProperty("numAni"));
            Behavior[] brains = new Behavior[numAnimals];
            for (int i = 0; i < numAnimals; i++)
            {
                int layers = Integer.parseInt(props.getProperty("beha" + i + "layers"));
                int[] nodes = new int[layers];
                for (int j = 0; j < layers; j++)
                {
                    nodes[j] = Integer.parseInt(props.getProperty("beha" + i + "-" + j + "nodes"));
                }
                brains[i] = new Behavior(layers, nodes, props.getProperty("beha" + i + "brain").split(","));
            }
            //The original read "simtype" for the local branch but "simType"
            //for the distributed one, so at most one branch could ever match a
            //given file; accept either casing, and avoid the NPE the original
            //threw when the key was missing entirely.
            String simType = props.getProperty("simtype");
            if (simType == null)
            {
                simType = props.getProperty("simType");
            }
            if ("local".equalsIgnoreCase(simType))
            {
                LocalBehaviorCalculator lbc = new LocalBehaviorCalculator(mgr, mutFac);
                for (int cntr = 0; cntr < brains.length; cntr++)
                {
                    lbc.addBehavior(brains[cntr]);
                }
                bc = lbc;
            }
            else if ("distributed".equalsIgnoreCase(simType))
            {
                //Handle later
            }
        }
        catch (IOException e)
        {
            mgr.unhandledException(e, "An error occurred loading from file.");
        }
        catch (NumberFormatException e)
        {
            //Malformed or missing numeric entries used to crash the load;
            //report them through the manager like an I/O failure instead.
            mgr.unhandledException(e, "An error occurred loading from file.");
        }
        return bc;
    }

    /**
     * Returns the weight array of the desired agent's net.
     *
     * @param ndx index of the agent
     * @return weights of that agent's behavior net
     */
    public double[] getNetByIndex(int ndx)
    {
        return behaviorList[ndx].getWeights();
    }
    
    /**
     * Breeds the behaviors of two parents into a new, mutated network, IAW
     * section 9 of the definition. Layer count, per-layer node count and each
     * weight are averaged across the parents and then scaled by a mutation
     * multiplier derived from, respectively, the mother's map position, the
     * parents' generations, and the mother's age.
     *
     * @param f     index of "father"
     * @param m     index of "mother"
     * @param x     x position of "mother"
     * @param y     y position of "mother"
     * @param xSize size of map in x direction
     * @param ySize size of map in y direction
     * @param g1    generation of "father"
     * @param g2    generation of "mother"
     * @param gM    highest generation for this species
     * @param age   age of "mother"
     * @param span  lifespan of this species
     * @return amalgam of the parents' behavior
     */
    public BasicNetwork generateBehavior(int f, int m, int x, int y, int xSize, int ySize,
            int g1, int g2, int gM, int age, int span)
    {
        BasicNetwork fNet = behaviorList[f].net;
        BasicNetwork mNet = behaviorList[m].net;
        BasicNetwork newNet = new BasicNetwork();
                
        //Minus 4 because only hidden layers are counted here: the input and
        //output layers of each parent net are skipped.
        int numLayers = (fNet.getLayerCount() + mNet.getLayerCount() - 4) / 2;
        //Double division: the original int division truncated both position
        //ratios to 0 for any point inside the map, so position never actually
        //influenced the layer-count mutation.
        double multiplier = getMutationFactor()
                * ((double) Math.abs(x) / xSize + (double) Math.abs(y) / ySize);
        //NOTE(review): for non-negative x this condition reduces to "y is
        //even" — confirm against section 9 whether x was meant to matter.
        if (x % 2 == 0 && y % 2 == 0 || x % 2 == 1 && y % 2 == 0)
        {
            multiplier += 1;
        }
        else
        {
            multiplier = 1 - multiplier;
        }
        //Implicit truncation back to int is intended: layer counts are whole.
        numLayers *= multiplier;
        newNet.addLayer(new BasicLayer(null, false, fNet.getInputCount()));
        for (int i = 0; i < numLayers; i++)
        {
            int numNodes = (fNet.getLayerNeuronCount(i) + mNet.getLayerNeuronCount(i)) / 2;
            //Generation gap relative to how far the parents trail the species
            //maximum; double division (the original int form truncated to 0).
            double a = (double) Math.abs(g1 - g2) / (gM - Math.min(g1, g2) + 1);
            if (a > 1)
            {
                //Keep the ratio in [0,1] by inverting it.
                a = Math.pow(a, -1);
            }
            multiplier = getMutationFactor() * a;
            if (g1 % 2 == 0)
            {
                multiplier += 1;
            }
            else
            {
                multiplier = 1 - multiplier;
            }
            numNodes *= multiplier;
            newNet.addLayer(new BasicLayer(new ActivationSigmoid(), false, numNodes));
            //NOTE(review): weights are written while layers are still being
            //added, and j/k run over the PARENT layer sizes, which can exceed
            //the mutated size of the layer just added. Encog normally requires
            //the structure to be finalized before setWeight is usable — verify
            //this path against the Encog version in use.
            for (int j = 0; j < mNet.getLayerNeuronCount(i); j++)
            {
                for (int k = 0; k < fNet.getLayerNeuronCount(i + 1); k++)
                {
                    double thisWeight = (mNet.getWeight(i, j, k) + fNet.getWeight(i, j, k)) / 2;
                    //Left-to-right evaluation keeps this in double arithmetic:
                    //(factor * age) is a double before the division by span.
                    multiplier = getMutationFactor() * age / span;
                    if (age % 2 == 0)
                    {
                        multiplier += 1;
                    }
                    else
                    {
                        multiplier = 1 - multiplier;
                    }
                    newNet.setWeight(i, j, k, thisWeight * multiplier);
                }
            }
        }
        newNet.addLayer(new BasicLayer(new ActivationSigmoid(), false, mNet.getOutputCount()));
        return newNet;
    }
    
    /**
     * Returns the string form of the indexed agent's behavior.
     *
     * @param i index of the behavior
     * @return that behavior's toString()
     */
    public String getBrainString(int i)
    {
        return behaviorList[i].toString();
    }
    
    /**
     * Removes the behavior at the given index; implementation is
     * subclass-specific.
     *
     * @param index index of behavior to remove
     */
    public abstract void removeBehavior(int index);

    /**
     * @return the mutationFactor
     */
    public double getMutationFactor() {
        return mutationFactor;
    }

    /**
     * @param mutationFactor the mutationFactor to set
     */
    public void setMutationFactor(double mutationFactor) {
        this.mutationFactor = mutationFactor;
    }
}
