/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package technotheist.imgcog.net;

import com.amd.aparapi.Kernel;

/**
 *
 * @author Mitch
 */
public abstract class FeedFwdNet extends Net {



    @Override
    protected FeedFwdNetRunner runner() {
        // Covariant narrowing of the base class's runner accessor, so callers
        // inside this class can reach feed-forward-specific members (Layer0).
        return (FeedFwdNetRunner) super.runner();
    }

    protected void setLayer0(int layer) {
        // Stage the target layer index and push it to the device explicitly;
        // the runner is configured with explicit buffer transfers.
        final FeedFwdNetRunner r = runner();
        r.Layer0[0] = layer;
        r.put(r.Layer0);
    }

    protected abstract class FeedFwdNetRunner extends NetRunner {

        // NOTE(review): the commented-out members below (Mode, node buffers,
        // weight/bias arrays, nodeCountTotal, ...) are referenced by methods
        // further down, so they are presumably declared in NetRunner now --
        // confirm before deleting this historical record.
//        static final byte BIPOLAR = 0x1;
//        static final byte SIGMOID = 0x2;
//        static final byte STEP = 0x4;
//        static final byte SHARED_BIAS = 0x8;
//        static final byte SHARED_WEIGHT = 0x10;
//        static final byte MULT_INPUTS = 0x20;
//        //
//        //
//        protected final int inputSize;
//        protected final int outputSize;
//        protected final float[] input;
//        protected final float[] output;
//        //
//        private final int[] Mode = new int[1];
        // Single-element buffer used to pass the current layer index to the
        // kernel (scalars must travel in arrays for explicit transfers).
        private final int[] Layer0 = new int[1];
        //
// 	      private final int nodeCountTotal;
//        private final int nodeCountTotal_even;
//        private final int nodeCountTotal_odd;
        // Total number of layers, including input and output layers.
        private final int layerCount;
        // Nodes per layer, indexed by layer. @Constant marks Aparapi
        // constant-memory placement.
        @Constant
        private final int[] layerNodeCount;
        // Start offset of layer i (i >= 2) within its parity node buffer;
        // layers 0 and 1 implicitly start at 0, hence size layerCount - 2.
        @Constant
        private final int[] layerNodeIndex0;
//        private final float[] nodes_even;
//        private final float[] nodes_odd;
        //
        // When true, every node of a layer reads the same weight vector.
        private final boolean sharedWeights;
//        private final int weightCountTotal;
        // Fan-in (weights per node) for each weight band between layers.
        @Constant
        private final int[] layerInputCountPerNode;
        // Start offset of weight band i (i >= 1) in the weight array;
        // band 0 implicitly starts at 0.
        @Constant
        private final int[] layerWeightIndex0;
//        private final float[] weights;
        //
        // Start offset of each layer's biases (per-node mode) in the bias
        // array; unused lookups short-circuit in shared mode.
        @Constant
        private final int[] layerBiasIndex0;
        // When true, every node of a layer reads the same bias slot.
        private final boolean sharedBias;
//        private final float[] bias;
        //

        /**
         * Builds the layer/weight/bias index tables from the config and
         * uploads the initial weights and biases through the inherited
         * {@code set_weight}/{@code set_bias} accessors.
         *
         * @param config topology, weight and bias source for this net
         * @throws IllegalArgumentException if the config declares fewer than
         *         two layers (the index tables require at least an input and
         *         an output layer; the old code failed with a
         *         NegativeArraySizeException instead)
         */
        protected FeedFwdNetRunner(FeedFwdNetConfig config) {
            super(config);
            //
            // Layer geometry. Node activations live in two parity buffers
            // (even layers in one, odd layers in the other), so per-buffer
            // start offsets are accumulated independently.
            layerCount = config.getLayerCount();
            if (layerCount < 2) {
                throw new IllegalArgumentException(
                        "feed-forward net needs at least 2 layers, got " + layerCount);
            }
            layerNodeCount = new int[layerCount];
            layerNodeIndex0 = new int[layerCount - 2];
            int nodeCnt_even = 0;
            int nodeCnt_odd = 0;
            boolean even = true;
            for (int i = 0; i < layerCount; i++) {
                layerNodeCount[i] = config.getLayerNodeCount(i);
                // Layers 0 and 1 open their buffers at offset 0, so only
                // layers >= 2 record a start index.
                if (even) {
                    if (i > 1) {
                        layerNodeIndex0[i - 2] = nodeCnt_even;
                    }
                    nodeCnt_even += layerNodeCount[i];
                } else {
                    if (i > 1) {
                        layerNodeIndex0[i - 2] = nodeCnt_odd;
                    }
                    nodeCnt_odd += layerNodeCount[i];
                }
                even = !even;
            }
            //
            // Weight layout: band i connects layer i to layer i+1. Shared
            // mode stores one weight vector per band; otherwise one vector
            // per destination node.
            sharedWeights = config.hasSharedWeights();
            layerInputCountPerNode = new int[layerCount - 1];
            layerWeightIndex0 = new int[layerCount - 2];
            int wCnt = 0;
            for (int i = 0; i < layerCount - 1; i++) {
                layerInputCountPerNode[i] = config.getLayerWeightCount(i);
                if (i > 0) {
                    layerWeightIndex0[i - 1] = wCnt;
                }
                if (sharedWeights) {
                    wCnt += layerInputCountPerNode[i];
                } else {
                    wCnt += layerInputCountPerNode[i] * layerNodeCount[i + 1];
                }
            }
            // Copy the initial weights from the config into the kernel array.
            for (int lay = 0; lay < layerCount - 1; lay++) {
                final int base = (lay > 0) ? layerWeightIndex0[lay - 1] : 0;
                final int perNode = layerInputCountPerNode[lay];
                if (sharedWeights) {
                    for (int w = 0; w < perNode; w++) {
                        set_weight(base + w, config.getSharedInputWeight(lay, w));
                    }
                } else {
                    for (int n = 0; n < layerNodeCount[lay + 1]; n++) {
                        // BUG FIX: the old code did `w0 += n * perNode` inside
                        // this loop, accumulating a triangular stride
                        // (0, 1, 3, 6, ... x perNode) and misplacing every
                        // weight from the third node of each layer onwards.
                        final int nodeBase = base + n * perNode;
                        for (int w = 0; w < perNode; w++) {
                            set_weight(nodeBase + w, config.getInputWeight(lay, n, w));
                        }
                    }
                }
            }
            //
            // Initialise bias: shared mode uses one slot per layer addressed
            // by layer number; per-node mode packs biases contiguously.
            sharedBias = config.hasSharedBias();
            layerBiasIndex0 = new int[layerCount - 1];
            if (sharedBias) {
                for (int lay = 0; lay < layerCount; lay++) {
                    set_bias(lay, config.getSharedBias(lay));
                    if (lay > 0) {
                        layerBiasIndex0[lay - 1] = lay;
                    }
                }
            } else {
                int bSum = 0;
                for (int lay = 0; lay < layerCount; lay++) {
                    if (lay > 0) {
                        layerBiasIndex0[lay - 1] = bSum;
                    }
                    for (int n = 0; n < layerNodeCount[lay]; n++) {
                        set_bias(bSum + n, config.getBias(lay, n));
                        bSum++;
                    }
                }
            }
            // Explicit transfers: host<->device copies only happen on
            // put()/get(), never implicitly per kernel launch.
            setExplicit(true);
        }

        @Override
        public final void run() {
            // Kernel entry point (Aparapi). Reads the per-launch parameters
            // that the host pushed into the kernel buffers.
            int layer0 = Layer0[0];
            int mode = Mode[0];
            // NOTE(review): this body appears unfinished -- `layer0` and
            // `mode` are read but never used. It presumably should dispatch
            // to runML(mode, layer0); confirm against concrete subclasses.
            // `Mode` is commented out in this class, so it must be declared
            // in NetRunner for this to compile -- TODO confirm.

        }

        // Mode-dispatched kernel body implemented by concrete subclasses;
        // `mode` selects the operation, `layer` the layer to process.
        protected abstract void runML(int mode, int layer);

        /**
         * One work-item per node of {@code layer}: accumulates the weighted
         * inputs, adds the node's bias, applies the activation f and stores
         * the result. Summation order (inputs first, then bias) matches the
         * original so float rounding is identical.
         */
        protected void feedFwd(int layer) {
            final int node = getGlobalId();
            final int fanIn = getInputPerNode(layer);
            float net = 0f;
            for (int w = 0; w < fanIn; w++) {
                net += getInputAmt(layer, node, w);
            }
            net += getBias(layer, node);
            setNodeVal(layer, node, f(net));
        }

        // Activation function applied to each node's weighted input sum.
        public abstract float f(float x);

        // Derivative of f; the parameter name suggests it is expressed in
        // terms of fx = f(x) (usual for sigmoid-family activations) -- TODO
        // confirm against the concrete subclasses.
        public abstract float df(float fx);

        /*
         * Getters and Setters
         */

        // Total number of layers, including input and output layers.
        protected int getLayerCount() {
            return layerCount;
        }

        // True when every node of a layer shares one weight vector.
        protected boolean isSharedWeights() {
            return sharedWeights;
        }

        // True when every node of a layer shares one bias slot.
        protected boolean isSharedBias() {
            return sharedBias;
        }

        // Fan-in of each node in `layer`; the input layer (0) has no
        // incoming weights.
        protected int getInputPerNode(int layer) {
            return layer > 0 ? layerInputCountPerNode[layer - 1] : 0;
        }

        // Start offset of weight band `layer0` in the weight array;
        // band 0 starts at 0.
        protected int getFirstWeightIndex(int layer0) {
            return layer0 > 0 ? layerWeightIndex0[layer0 - 1] : 0;
        }

        // Start offset of `layer` inside its parity node buffer; layers 0
        // and 1 each open their buffer at 0.
        protected int getFirstNodeIndex(int layer) {
            return layer > 1 ? layerNodeIndex0[layer - 2] : 0;
        }

        // Index of the first bias slot for `layer`. Shared mode keeps one
        // slot per layer, addressed directly by layer number.
        protected int getBiasNodeIndex(int layer) {
            if (sharedBias) {
                return layer;
            }
            return layer > 0 ? layerBiasIndex0[layer - 1] : 0;
        }

        // Bias of `node` in `layer`; in shared mode every node of the layer
        // reads the same slot and `node` is ignored.
        protected float getBias(int layer, int node) {
            final int idx = sharedBias ? layer : getBiasNodeIndex(layer) + node;
            return get_bias(idx);
        }

        // Stores the bias of `node` in `layer`; in shared mode the write
        // targets the layer's single shared slot and `node` is ignored.
        protected void setBias(int layer, int node, float val) {
            final int idx = sharedBias ? layer : getBiasNodeIndex(layer) + node;
            set_bias(idx, val);
        }

        /**
         * Weight w feeding {@code node} of {@code layer}. Weights between
         * layer-1 and layer form band layer-1, hence the layer-1 base lookup.
         */
        protected float getInputWeight(int layer, int node, int w) {
            if (sharedWeights) {
                // All nodes of the layer share one weight vector.
                return get_weight(getFirstWeightIndex(layer - 1) + w);
            }
            // BUG FIX: was `node * getInputPerNode(layer) * w`, which
            // collapses to the band base for w == 0 (for every node) and
            // scrambles all other offsets; the within-node offset must be
            // added, not multiplied.
            return get_weight(getFirstWeightIndex(layer - 1) + node * getInputPerNode(layer) + w);
        }

        /**
         * Stores weight w feeding {@code node} of {@code layer}; mirrors the
         * indexing of {@code getInputWeight}.
         */
        protected void setInputWeight(int layer, int node, int w, float val) {
            if (sharedWeights) {
                set_weight(getFirstWeightIndex(layer - 1) + w, val);
                return;
            }
            // BUG FIX: was `node * getInputPerNode(layer) * w` -- the
            // within-node offset must be added, not multiplied (same defect
            // as the getter's original indexing).
            set_weight(getFirstWeightIndex(layer - 1) + node * getInputPerNode(layer) + w, val);
        }

        // Maps (layer, node, w) to the index of the source node in layer-1
        // that feeds input w of `node`; topology is subclass-specific.
        protected abstract int getInputSrcNode(int layer, int node, int w);

        // Weighted contribution of input w to `node`: the source node's
        // activation times the connecting weight.
        protected float getInputAmt(int layer, int node, int w) {
            return getNodeVal(layer - 1, getInputSrcNode(layer, node, w))
                    * getInputWeight(layer, node, w);
        }

        // Activation of node n in `layer`, read from the parity buffer that
        // holds the layer (even layers in one buffer, odd in the other).
        // Layers 0 and 1 start at offset 0, which is exactly what
        // getFirstNodeIndex returns for them, so one index expression covers
        // all layers.
        protected float getNodeVal(int layer, int n) {
            final int idx = getFirstNodeIndex(layer) + n;
            return (layer % 2 == 0) ? get_node_even(idx) : get_node_odd(idx);
        }

        // Stores the activation of node n in `layer` into the parity buffer
        // that holds the layer; mirrors the indexing of getNodeVal.
        protected void setNodeVal(int layer, int n, float val) {
            final int idx = getFirstNodeIndex(layer) + n;
            if (layer % 2 == 0) {
                set_node_even(idx, val);
            } else {
                set_node_odd(idx, val);
            }
        }

        // One work-item per input node: loads the host-side layer-0 values
        // into the even node buffer (layer 0 is even). `layer0Size` and
        // `layer0Nodes` are not declared here, so they are presumably
        // inherited from NetRunner -- TODO confirm.
        protected void copyIn() {
            final int gid = getGlobalId();
            if (gid >= layer0Size) {
                return;
            }
            set_node_even(gid, layer0Nodes[gid]);
        }

        // One work-item per output node: copies the last layer out of
        // whichever parity buffer holds it (odd layerCount => last layer
        // index is even). NOTE(review): the source offset subtracts
        // `layerNSize` from `nodeCountTotal` before indexing a single parity
        // buffer -- confirm in NetRunner that nodeCountTotal is the count
        // for that buffer, not the combined total.
        protected void copyOut() {
            final int gid = getGlobalId();
            if (gid >= layerNSize) {
                return;
            }
            final int src = nodeCountTotal - layerNSize + gid;
            if (layerCount % 2 == 1) {
                layerNOutput[gid] = get_node_even(src);
            } else {
                layerNOutput[gid] = get_node_odd(src);
            }
        }
    }
}
