import java.util.ArrayList;
import java.util.HashMap;

/**
 * Cascade-Correlation network (Fahlman &amp; Lebiere, 1990): starts from a
 * minimal net and repeatedly trains a single hidden "candidate" unit to
 * correlate with the residual output error, then freezes its input weights
 * and installs it as a new hidden layer.
 *
 * Depends on the NeuralNet superclass (not visible in this file) for the
 * layered {@code network} structure, {@code processInput}, {@code getNode},
 * {@code addConnection}, and the weight-update plumbing.
 *
 * NOTE(review): the inner Candidate machinery assumes a single output node
 * (see its own comment below).
 */
public class CascadeCorr extends NeuralNet {
	
	// Builds the initial net from a layer spec; the meaning of super's
	// second argument (1) is defined in NeuralNet — TODO confirm.
	public CascadeCorr(String[][][] l) {
    super(l, 1); 
  }

  /**
   * Demo: learns XOR. Trains the direct output connections first, then
   * grows the net one candidate unit at a time until error drops below 0.01,
   * finally printing the learned mapping for each training example.
   */
  public static void main(String[] args) {
    // Presumably "B" = bias, "L" = linear input, "S" = sigmoid output —
    // confirm against NeuralNet.quickCPPN.
    String[][] l = {{"B","L","L"},{"S"}};
    CascadeCorr c = new CascadeCorr(NeuralNet.quickCPPN(l));
		// Each example is {inputs, target values, target output-node indices};
		// the index array is how trainOutputCon maps targets to output nodes.
		double[][][] x = {{{0,0},{0},{0}},{{0,1},{1},{0}},{{1,0},{1},{0}},{{1,1},{0},{0}}};

    double momentum = 0.7;
    double learnErr = 1.0;
    double learnCorr = 1.0;
    // Initial training of the direct input->output connections.
    double e = c.convergeOutputCon(x, learnErr, momentum, Math.pow(10,-8));
		int i = 0;
		while (e > .01) {
			e = c.cascade(x, learnErr, learnCorr, momentum);
			i++;
			System.out.println("\n\n"+i+"\t"+e+"\n\n");
		} 		
    System.out.println("Iterations: "+i);
		// Show what the trained net computes for each example.
		for (int j = 0; j < x.length; j++) { 
			c.processInput(x[j][0]);
			double[] y = c.getOutputs();
			System.out.println(x[j][0][0]+" , "+x[j][0][1]+" : "+y[0]);
		}
  }

  /**
   * One Cascade-Correlation growth step: trains a fresh sigmoid candidate
   * until its correlation score S stops changing, installs it as a new
   * hidden layer just below the outputs, then retrains the output weights.
   *
   * @param examples  training set ({inputs, targets, target indices})
   * @param learnErr  learning rate for output-connection training
   * @param learnCorr learning rate for candidate correlation training
   * @param momentum  momentum for output-connection training
   * @return the output error after retraining with the new unit installed
   */
  public double cascade(double[][][] examples, double learnErr, double learnCorr, double momentum) {
    // coords left at (0,0); presumably positional metadata for the node —
    // TODO confirm against SigmoidNode's constructor.
    double[] coords = new double[2];
    Candidate can = new Candidate(new SigmoidNode(coords), this);
    double S = Double.NEGATIVE_INFINITY;
    double SPrev = 0;
    do {
      SPrev = S;
      S = can.correlate(this, examples, learnCorr);
      //**/can.check(this, examples);
      //**/System.out.println("S: "+S+"\t"+(S-SPrev));
      //Abs should not be needed (corr can be pos or neg?)
    } while (Math.abs(S - SPrev) > Math.pow(10,-9));
    installCandidate(can);
    return convergeOutputCon(examples, learnErr, momentum, Math.pow(10,-8));
  }

  /**
   * Repeats full-batch output-connection training passes until the per-pass
   * error improvement falls below ~1e-9 (or the error worsens).
   *
   * @param thres intended convergence threshold — NOTE(review): this
   *              parameter is never used; the loop condition hard-codes
   *              Math.pow(10,-9) instead. Likely a bug to confirm.
   * @return the error from the final training pass
   */
  public double convergeOutputCon(double[][][] examples, double learnRate, double momentum, double thres) {
    double err = Double.POSITIVE_INFINITY;
    double errPrev = 0;
    do {
      errPrev = err;
      err = trainOutputCon(examples, learnRate, momentum);
      //System.out.println("err: "+err);
    } while (errPrev - err > Math.pow(10,-9));
    return err;
  }
	
	/**
	 * One full-batch backprop pass: accumulates error and deltas over all
	 * examples, then applies a single weight update scaled by
	 * learnRate / examples.length.
	 *
	 * @return the total (summed over all examples) error for this pass
	 */
	public double trainOutputCon(double[][][] examples, double learnRate, double momentum) {
		double t = 0;
		for (int i = 0; i < examples.length; i++) {
			processInput(examples[i][0]);
			//Reset output deltas
			for (int j = 0; j < network.get(network.size()-1).size(); j++) {
				network.get(network.size()-1).get(j).resetDelta();
			}
			//Compute error
			for (int j = 0; j < examples[i][1].length; j++) {
				// examples[i][2][j] names the output-node index for target j.
				int k = (int) examples[i][2][j];
				t += network.get(network.size()-1).get(k).computeError(examples[i][1][j]);
			}
			//Start at second to last layer and propagate the error backwards
			for (int j = network.size()-2; j >= 0 ; j--) {
				for (int k = 0; k < network.get(j).size(); k++) {
					network.get(j).get(k).propagateError();
				}
			}
		}
		//Update weights (batch average: scale the rate by 1/N)
		learnRate *= (1.0/examples.length);	
		for (int i = 0; i < network.size(); i++) {
			for (int j = 0; j < network.get(i).size(); j++) {
				// The 'true' flag presumably restricts the update to output-side
				// connections (hidden weights stay frozen, per Cascade-Correlation)
				// — TODO confirm in Node.updateWeights.
				network.get(i).get(j).updateWeights(learnRate, momentum, true);
			}
		}
    return t;
	}

  /**
   * Freezes the trained candidate and splices it into the network as a new
   * single-node layer directly below the output layer. Incoming connections
   * reuse the weights learned during correlation training; the new
   * candidate-to-output connections get whatever initial weight
   * addConnection assigns (presumably random — TODO confirm in NeuralNet).
   */
  public void installCandidate(Candidate can) {
    // idx is the old output-layer index; inserting there pushes the output
    // layer up to idx+1 and makes idx the candidate's layer.
    int idx = network.size()-1;
    network.add(idx, new ArrayList<Node>());
    network.get(idx).add(can.node);

    // After the insert, layers 0..size()-3 are everything below the
    // candidate layer; wire each of those nodes into the candidate.
    for (int i = 0; i < size()-2; i++) {
      for (int j = 0; j < size(i); j++) {
        addConnection(i,j,idx,0);
        Node nx = getNode(i,j);
        // Copy the frozen input weight learned for this source node
        // (setWeight on the connection just appended, hence nx.size()-1).
        nx.setWeight(nx.size()-1,can.cons.get(nx)[0]);
      }
    }
    // Connect the candidate to every node in the (shifted-up) output layer.
    for (int i = 0; i < size(idx+1); i++) {
      addConnection(idx,0,idx+1,i);
    }
  }

  //NB Only works with single output nodes!!!
  /**
   * A candidate hidden unit being trained (by gradient ascent on the
   * error-correlation score S) before installation into the network.
   */
  private class Candidate {
    // The unit that will be installed if training succeeds.
    private Node node;
    // Current correlation score between candidate value and output error.
    private double S;
    // Mean output error over the training batch (single output assumed).
    private double avgErr;
    // Mean candidate value over the training batch.
    private double avgVal;
    // For each existing network node: [0] = candidate input weight,
    // [1] = accumulated gradient ("weightDiff").
    private HashMap<Node, double[]> cons;

    /**
     * Wraps the given node and gives it a randomly-initialized input
     * weight (uniform in [-0.3, 0.3)) from every node of every layer
     * except the output layer.
     */
    public Candidate(Node cand, NeuralNet net) {
      node = cand;
      S = 0;
      avgErr = 0;
      avgVal = 0;
      cons = new HashMap<Node, double[]>();
      for (int i = 0; i < net.size()-1; i++) {
        for (int j = 0; j < net.size(i); j++) {
          //0 = weight //1 = weightDiff
          double[] w = new double[2];
          w[0] = 0.6*Math.random() - 0.3;
          cons.put(net.getNode(i, j), w);
        }
      }
    }

    // Sign of the current correlation score; used to turn maximization of
    // |S| into a consistent ascent direction in correlate().
    public double sign() {
      if (S >= 0) return 1.0;
      else return -1.0;
    }

    //Squared or abs err??
    // Raw signed residual (output minus target) for the single output node.
    public double error(double out, double tar) {
      return out - tar;
    }

    /**
     * Computes the Cascade-Correlation score
     * S = sum_k (val_k - avgVal) * (err_k - avgErr)
     * in two passes over the batch: first the means, then the covariance.
     *
     * NOTE(review): the candidate "value" used here is
     * node.activationDeriv(t) — the activation *derivative* of the weighted
     * input sum. Standard CasCor correlates the activation itself and uses
     * the derivative only in the gradient; verify against Node's API.
     */
    public double computeS(NeuralNet net, double[][][] examples) {
      //Compute avg err and val
      avgErr = 0;
      avgVal = 0;
      for (int k = 0; k < examples.length; k++) {
        net.processInput(examples[k][0]);
        avgErr += error(net.getOutputs()[0], examples[k][1][0]) / examples.length;
        double t = 0;
        for (int i = 0; i < net.size()-1; i++) {
          for (int j = 0; j < net.size(i); j++) {
            t += getNode(i,j).getOutput()*cons.get(getNode(i,j))[0];
          }
        }
        avgVal += node.activationDeriv(t) / examples.length;
      }
      S = 0;
      //Compute S
      for (int k = 0; k < examples.length; k++) {
        net.processInput(examples[k][0]);
        double err = error(net.getOutputs()[0], examples[k][1][0]);
        double t = 0;
        for (int i = 0; i < net.size()-1; i++) {
          for (int j = 0; j < net.size(i); j++) {
            t += getNode(i,j).getOutput()*cons.get(getNode(i,j))[0];
          }
        }
        double val = node.activationDeriv(t);
        S += (val - avgVal)*(err - avgErr);
      }
      return S;
    }
    
    /**
     * One gradient-ascent step on S over the candidate's input weights.
     *
     * NOTE(review): two suspicious details to confirm —
     *  1. cons[..][1] (the gradient accumulator) is never reset between
     *     calls, so contributions from every previous iteration keep adding
     *     up (the commented-out reset below suggests the author noticed).
     *  2. The per-weight term multiplies by (input * weight), whereas the
     *     analytic dS/dw is sign*(err-avgErr)*f'(t)*input — the extra
     *     weight factor does not match the textbook derivative.
     *
     * @return the correlation score S as computed *before* this update
     */
    public double correlate(NeuralNet net, double[][][] examples, double learnRate) {
      computeS(net, examples);
      for (int k = 0; k < examples.length; k++) {
        net.processInput(examples[k][0]);
        //Compute val
        double t = 0;
        for (int i = 0; i < net.size()-1; i++) {
          for (int j = 0; j < net.size(i); j++) {
            t += getNode(i,j).getOutput()*cons.get(getNode(i,j))[0];
            //Comment this line out to fix, but why?
            //**/cons.get(getNode(i,j))[1] = 0;
          }
        }
        double val = node.activationDeriv(t);
        double err = error(net.getOutputs()[0], examples[k][1][0]);
        //Compute weightDiff
        for (int i = 0; i < net.size()-1; i++) {
          for (int j = 0; j < net.size(i); j++) {
            Node nx = getNode(i,j);
            cons.get(nx)[1] += sign()*(err - avgErr)*val*(nx.getOutput()*cons.get(nx)[0]);
          }
        }
      }
      for (int i = 0; i < net.size()-1; i++) {
        for (int j = 0; j < net.size(i); j++) {
          //Gradient ascent so -=??
          /**/cons.get(net.getNode(i,j))[0] += learnRate*cons.get(net.getNode(i,j))[1];
        }
      }
      return S;
    }
    
    /**
     * Finite-difference check of the correlation gradient: perturbs each
     * candidate weight by +/-eps, compares the numeric gradient g with the
     * accumulated gradient d, printing each pair (and their ratio) plus the
     * total absolute discrepancy.
     *
     * NOTE(review): g = (r1 - r2)/(2*eps) is the NEGATED central difference
     * (f(w-eps) - f(w+eps)); check the intended sign convention against the
     * sign() factor used in correlate().
     */
    public void check(NeuralNet net, double[][][] examples) {
      double eps = 0.001;
      double t = 0;
      
      for (int i = 0; i < net.size() - 1; i++) {
        for (int j = 0; j < net.size(i); j++) {
          Node n = net.getNode(i, j);
          double w = cons.get(n)[0];
          double d = cons.get(n)[1];
          cons.get(n)[0] = w - eps;
          double r1 = computeS(net, examples);
          cons.get(n)[0] = w + eps;
          double r2 = computeS(net, examples);
          cons.get(n)[0] = w;      // restore the original weight
          double g = (r1 - r2) / (2*eps);
          t += ((double) Math.abs(g - d));
          System.out.println(d+"\t"+g+"\t"+(g/d));
        }
      }
      System.out.println("Total err: "+t);
    }
  }
}
