import java.util.ArrayList;
import java.util.HashMap;

/**
 * Cascade-correlation trainer layered on top of {@link NeuralNet}.
 *
 * <p>Training alternates between (a) converging the output-side weights with
 * batch gradient descent ({@link #convergeOutputCon}) and (b) growing the
 * network: a pool of candidate hidden units is trained to maximize the
 * covariance between each unit's activation and the residual output error,
 * and the best candidate is installed as a new hidden layer
 * ({@link #cascade}). The scheme follows Fahlman &amp; Lebiere's
 * cascade-correlation architecture.
 *
 * <p>NOTE(review): this class relies on the {@code NeuralNet} base class for
 * the layer list {@code network}, {@code processInput}, {@code getOutputs},
 * {@code size()}, {@code size(int)}, {@code getNode}, {@code addConnection},
 * {@code evaluate} and {@code quickCPPN}. Their semantics are assumed from
 * usage here — confirm against the base class.
 */
public class CascadeCorr3 extends NeuralNet {

  public CascadeCorr3(String[][][] l) {
    // NOTE(review): second super() argument presumably selects a network
    // variant or output count — confirm against NeuralNet's constructor.
    super(l, 1);
  }

  /**
   * Counter used to scale the candidate learning rate in
   * {@link Candidate#correlate}; the demo driver seeds it with 1 and bumps it
   * once per installed hidden unit.
   */
  int nodeCount;

  /**
   * Demo driver: learns 2-bit parity (XOR), adding one hidden unit per
   * iteration until the training error drops below 0.01, then prints the
   * per-example outputs.
   */
  public static void main(String[] args) {
    String[][] l = {{"B","L","L"},{"S"}};
    CascadeCorr3 c = new CascadeCorr3(NeuralNet.quickCPPN(l));
    c.nodeCount = 1;
    // frac == 1 puts every example in the training split; the test split is empty.
    double[][][][] data = dataSplit(parity(2), 1);

    double momentum = 0.7;
    double learnErr = 1.0;
    double learnCorr = 1.0;
    double e = c.convergeOutputCon(data[0], learnErr, momentum, Math.pow(10, -8));
    int i = 0;
    while (e > .01) {
      e = c.cascade(data[0], learnErr, learnCorr, momentum);
      i++;
      c.nodeCount++;
      System.out.println("\n\n"+i+"\t"+e+"\n\n");
    }
    System.out.println("Iterations: "+i);
    System.out.println("Train: "+ c.evaluate(data[0]));
    System.out.println("Test: "+ c.evaluate(data[1]));
    for (int j = 0; j < data[0].length; j++) {
      c.processInput(data[0][j][0]);
      double[] y = c.getOutputs();
      System.out.println(data[0][j][0][0]+" , "+data[0][j][0][1]+" : "+y[0]);
    }
  }

  /**
   * One growth step of the cascade: trains each candidate in a fresh pool
   * until its correlation score S stops improving, installs the candidate
   * with the highest S, and re-converges the output connections.
   *
   * @param examples  training set in the {@code [input, target, targetIdx]} layout
   * @param learnErr  learning rate for the output-weight phase
   * @param learnCorr learning rate for the candidate-correlation phase
   * @param momentum  momentum for the output-weight phase
   * @return the training error after re-converging the output connections
   */
  public double cascade(double[][][] examples, double learnErr, double learnCorr, double momentum) {
    Candidate[] cans = createCandidates(3, this);
    Candidate bestCan = cans[0];
    double bestS = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < cans.length; i++) {
      double S = Double.NEGATIVE_INFINITY;
      double SPrev = 0;
      int steps = 0;       // update steps taken for this candidate
      int regressions = 0; // steps where S got worse (diagnostic only)
      do {
        SPrev = S;
        S = cans[i].altCorrelate(this, examples, learnCorr);
        steps++;
        if (S < SPrev) regressions++;
      } while (Math.abs(S - SPrev) > Math.pow(10, -8));
      if (S > bestS) {
        bestCan = cans[i];
        bestS = S;
      }
      System.out.println(S+"\t"+steps+"\t"+regressions);
    }
    System.out.println("Best S: "+bestS+"\n");
    installCandidate(bestCan);
    return convergeOutputCon(examples, learnErr, momentum, Math.pow(10, -8));
  }

  /**
   * Repeats {@link #trainOutputCon} passes until the per-pass improvement in
   * total error falls below {@code thres}.
   *
   * @return the final training error
   */
  public double convergeOutputCon(double[][][] examples, double learnRate, double momentum, double thres) {
    double err = Double.POSITIVE_INFINITY;
    double errPrev;
    do {
      errPrev = err;
      err = trainOutputCon(examples, learnRate, momentum);
    } while (errPrev - err > thres);
    return err;
  }

  /**
   * One batch gradient-descent pass over the network's weights.
   *
   * <p>Example layout (assumed from usage — confirm against callers):
   * {@code examples[i][0]} = input vector, {@code examples[i][1]} = target
   * values, {@code examples[i][2]} = indices of the output nodes each target
   * applies to.
   *
   * @return the total error accumulated over all examples this pass
   */
  public double trainOutputCon(double[][][] examples, double learnRate, double momentum) {
    double t = 0;
    for (int i = 0; i < examples.length; i++) {
      processInput(examples[i][0]);
      // Reset the deltas on the output layer before injecting new error.
      for (int j = 0; j < network.get(network.size()-1).size(); j++) {
        network.get(network.size()-1).get(j).resetDelta();
      }
      // Inject error at the output nodes named by the example's index row.
      for (int j = 0; j < examples[i][1].length; j++) {
        int k = (int) examples[i][2][j];
        t += network.get(network.size()-1).get(k).computeError(examples[i][1][j]);
      }
      // Propagate the error backwards, starting at the second-to-last layer.
      for (int j = network.size()-2; j >= 0; j--) {
        for (int k = 0; k < network.get(j).size(); k++) {
          network.get(j).get(k).propagateError();
        }
      }
    }
    // Average the accumulated gradient over the batch, then apply it.
    learnRate *= (1.0/examples.length);
    for (int i = 0; i < network.size(); i++) {
      for (int j = 0; j < network.get(i).size(); j++) {
        network.get(i).get(j).updateWeights(learnRate, momentum, true);
      }
    }
    return t;
  }

  /**
   * Builds {@code amount} candidates per node type. Only sigmoid ("S")
   * candidates are enabled; unknown type strings also fall back to sigmoid.
   */
  public Candidate[] createCandidates(int amount, NeuralNet net) {
    String[] types = {"S"};
    Candidate[] cans = new Candidate[amount*types.length];
    for (int i = 0; i < types.length; i++) {
      for (int j = 0; j < amount; j++) {
        double[] c = new double[2];
        Node n;
        if (types[i].equals("S")) n = new SigmoidNode(c);
        else if (types[i].equals("G")) n = new GaussianNode(c);
        else n = new SigmoidNode(c);

        cans[i*amount + j] = new Candidate(n, net);
      }
    }
    return cans;
  }

  /**
   * Installs a trained candidate as a new single-node layer just below the
   * output layer: wires every pre-existing non-output node into it (copying
   * the candidate's trained input weights) and connects it to every output
   * node with a fresh connection.
   */
  public void installCandidate(Candidate can) {
    int idx = network.size()-1;
    network.add(idx, new ArrayList<Node>());
    network.get(idx).add(can.node);

    // size() has grown by one, so size()-2 == idx: all layers below the new one.
    for (int i = 0; i < size()-2; i++) {
      for (int j = 0; j < size(i); j++) {
        addConnection(i,j,idx,0);
        Node nx = getNode(i,j);
        // Carry over the weight the candidate converged to during training.
        nx.setWeight(nx.size()-1,can.cons.get(nx)[0]);
      }
    }
    for (int i = 0; i < size(idx+1); i++) {
      addConnection(idx,0,idx+1,i);
    }
  }

  /**
   * Generates the full truth table of the {@code bits}-input parity problem.
   *
   * @return {@code 2^bits} examples in the {@code [input, target, targetIdx]}
   *     layout; the single target is 1 for odd parity, 0 for even
   */
  public static double[][][] parity(int bits) {
    int tot = (int) Math.pow(2,bits);
    double[][][] data = new double [tot][3][];
    for (int i = 0; i < tot; i++) {
      data[i][0] = new double[bits];
      data[i][1] = new double[1];
      data[i][2] = new double[1];
      data[i][2][0] = 0; // single output node at index 0
      int val = i;
      int ham = 0; // Hamming weight of i
      for (int j = 0; j < bits; j++) {
        int exp = bits-1-j;
        double pow = Math.pow(2,exp);
        if (val >= pow) {
          data[i][0][j] = 1;
          val -= pow;
          ham++;
        } else {
          data[i][0][j] = 0;
        }
      }
      if (ham % 2 == 0) data[i][1][0] = 0;
      else data[i][1][0] = 1;
    }
    return data;
  }

  /**
   * Splits {@code data} into train/test: every {@code frac}-th example
   * (indices 0, frac, 2*frac, ...) goes to the training split, the rest to
   * the test split. {@code frac == 1} puts everything in training.
   *
   * @throws IllegalArgumentException if {@code frac < 1}
   */
  public static double[][][][] dataSplit(double[][][] data, int frac) {
    if (frac < 1) throw new IllegalArgumentException("frac must be >= 1: " + frac);
    double[][][][] split = new double[2][][][];
    // Bug fix: the loop below selects ceil(length/frac) training examples, so
    // the train array must be sized with ceiling division, not floor division
    // (floor overflowed the array whenever length % frac != 0).
    int train = (data.length + frac - 1) / frac;
    split[0] = new double[train][][];
    split[1] = new double[data.length-train][][];
    int trI = 0;
    int teI = 0;
    for (int i = 0; i < data.length; i++) {
      if (i % frac == 0) {
        split[0][trI] = data[i];
        trI++;
      } else {
        split[1][teI] = data[i];
        teI++;
      }
    }
    return split;
  }


  /**
   * A candidate hidden unit: a node plus one trainable weight (and its
   * gradient accumulator) per existing non-output node. Trained to maximize
   * |S|, the covariance between the candidate's activation and the network's
   * residual output error.
   *
   * <p>NB: only works with single-output networks — {@code error} and the
   * correlation loops read output index 0 exclusively.
   */
  private class Candidate {
    private Node node;
    private double S;
    private double avgErr;
    private double avgVal;
    // Per input node: [0] = weight, [1] = gradient accumulator.
    private HashMap<Node, double[]> cons;
    // Sign of the raw (unabsoluted) covariance; flips the gradient so the
    // update always climbs |S|.
    private double sign = 1;

    public Candidate(Node cand, NeuralNet net) {
      node = cand;
      S = 0;
      avgErr = 0;
      avgVal = 0;
      cons = new HashMap<Node, double[]>();
      for (int i = 0; i < net.size()-1; i++) {
        for (int j = 0; j < net.size(i); j++) {
          double[] w = new double[2];
          w[0] = 0.6*Math.random() - 0.3; // uniform in [-0.3, 0.3)
          cons.put(net.getNode(i, j), w);
        }
      }
    }

    public double[] getCons(Node n) {
      return cons.get(n);
    }

    /**
     * +1 when the two values share a sign, -1 otherwise.
     * NOTE(review): currently unused — {@link #computeS} derives the sign
     * from the raw covariance instead.
     */
    public double sign(double canVal, double outVal) {
      if (!(canVal >= 0 ^ outVal >= 0)) return 1.0;
      else return -1.0;
    }

    /** Residual error at the (single) output node, scaled by its activation derivative. */
    public double error(double out, double tar, NeuralNet net) {
      return (out - tar)*net.getNode(net.size()-1,0).activationDeriv(net.getNode(net.size()-1,0).input);
    }

    /**
     * Computes S = |sum_k (val_k - avgVal) * (err_k - avgErr)| over the
     * examples, caching {@code avgErr}, {@code avgVal} and the covariance
     * sign as side effects.
     */
    public double computeS(NeuralNet net, double[][][] examples) {
      // First pass: average error and average candidate activation.
      avgErr = 0;
      avgVal = 0;
      for (int k = 0; k < examples.length; k++) {
        net.processInput(examples[k][0]);
        avgErr += error(net.getOutputs()[0], examples[k][1][0], net) / examples.length;
        double t = 0;
        for (int i = 0; i < net.size()-1; i++) {
          for (int j = 0; j < net.size(i); j++) {
            t += getNode(i,j).getOutput()*cons.get(getNode(i,j))[0];
          }
        }
        avgVal += node.activationFunc(t) / examples.length;
      }
      S = 0;
      // Second pass: accumulate the covariance.
      for (int k = 0; k < examples.length; k++) {
        net.processInput(examples[k][0]);
        double err = error(net.getOutputs()[0], examples[k][1][0], net);
        double t = 0;
        for (int i = 0; i < net.size()-1; i++) {
          for (int j = 0; j < net.size(i); j++) {
            t += getNode(i,j).getOutput()*cons.get(getNode(i,j))[0];
          }
        }
        double val = node.activationFunc(t);
        S += (val - avgVal)*(err - avgErr);
      }
      // Bug fix: S / Math.abs(S) was NaN when S == 0, poisoning all
      // subsequent weight updates; default the sign to +1 in that case.
      sign = (S >= 0) ? 1.0 : -1.0;
      S = Math.abs(S);
      return S;
    }

    /**
     * One analytic gradient-ascent step on S for every candidate input
     * weight, then returns the (pre-update) score.
     */
    public double correlate(NeuralNet net, double[][][] examples, double learnRate) {
      computeS(net, examples);
      // Bug fix: zero the gradient accumulators ONCE, before the example
      // loop. They were previously reset inside the loop, which discarded
      // every example's contribution except the last one's.
      for (int i = 0; i < net.size()-1; i++) {
        for (int j = 0; j < net.size(i); j++) {
          cons.get(getNode(i,j))[1] = 0;
        }
      }
      for (int k = 0; k < examples.length; k++) {
        net.processInput(examples[k][0]);
        // Candidate pre-activation for this example.
        double t = 0;
        for (int i = 0; i < net.size()-1; i++) {
          for (int j = 0; j < net.size(i); j++) {
            t += getNode(i,j).getOutput()*cons.get(getNode(i,j))[0];
          }
        }
        double deriv = node.activationDeriv(t);
        double err = error(net.getOutputs()[0], examples[k][1][0], net);
        // Accumulate dS/dw for each input weight.
        for (int i = 0; i < net.size()-1; i++) {
          for (int j = 0; j < net.size(i); j++) {
            Node nx = getNode(i,j);
            cons.get(nx)[1] += sign*(err - avgErr)*deriv*nx.getOutput();
          }
        }
      }
      // Apply the averaged step, damped by the current hidden-node count.
      for (int i = 0; i < net.size()-1; i++) {
        for (int j = 0; j < net.size(i); j++) {
          cons.get(net.getNode(i,j))[0] -= (learnRate / (examples.length*nodeCount))*cons.get(net.getNode(i,j))[1];
        }
      }
      return S;
    }

    /**
     * Gradient check: compares the stored analytic gradient ({@code cons[..][1]})
     * against a central finite difference of S for every weight, printing the
     * per-weight pair and the total absolute discrepancy.
     */
    public void check(NeuralNet net, double[][][] examples) {
      double eps = 0.001;
      double t = 0;

      for (int i = 0; i < net.size() - 1; i++) {
        for (int j = 0; j < net.size(i); j++) {
          Node n = net.getNode(i, j);
          double w = cons.get(n)[0];
          double d = cons.get(n)[1];
          cons.get(n)[0] = w - eps;
          double r1 = computeS(net, examples);
          cons.get(n)[0] = w + eps;
          double r2 = computeS(net, examples);
          cons.get(n)[0] = w;
          double g = (r1 - r2) / (2*eps);
          t += ((double) Math.abs(g - d));
          System.out.println(d+"\t"+g+"\t"+(g/d));
        }
      }
      System.out.println("Total err: "+t);
    }


    /**
     * One numeric (finite-difference) gradient-ascent step on S per weight.
     * {@code (r1 - r2) / (2*eps)} with r1 at {@code w-eps} and r2 at
     * {@code w+eps} is the NEGATED derivative, so subtracting it climbs S.
     *
     * @return S recomputed after all weights have been updated
     */
    public double altCorrelate(NeuralNet net, double[][][] examples, double learnRate) {
      double eps = 0.001;

      for (int i = 0; i < net.size() - 1; i++) {
        for (int j = 0; j < net.size(i); j++) {
          Node n = net.getNode(i, j);
          double w = cons.get(n)[0];
          cons.get(n)[0] = w - eps;
          double r1 = computeS(net, examples);
          cons.get(n)[0] = w + eps;
          double r2 = computeS(net, examples);
          cons.get(n)[0] = w;
          cons.get(n)[0] -= learnRate*((r1 - r2) / (2*eps));
        }
      }
      return computeS(net, examples);
    }
  }
}
