package tetris.agent;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;

import tetris.simulator.State;
import tetris.simulator.Visualizer;
import tetris.features.*;
import jMEF.*;

import java.util.List;
import java.io.*;

/**
 * Pairs one sampled value-function weight vector with the score (rows
 * cleared) it achieved when evaluated. Fields are public because the
 * cross-entropy loop fills and reads them directly.
 */
class WeightValuePair {
  public double value;    // accumulated reward earned by these weights
  public double[] weight; // sampled linear value-function weights
}

/**
 * Orders WeightValuePair objects by descending value, so the best-scoring
 * weight vectors come first after sorting.
 *
 * Fix: the original returned only 1 or -1 and never 0 for equal values,
 * violating the Comparator total-order contract — Collections.sort (TimSort)
 * can throw "Comparison method violates its general contract" on such a
 * comparator. Double.compare also handles NaN consistently.
 */
class WeightValuePairComparator implements Comparator<WeightValuePair> {
  @Override
  public int compare(WeightValuePair object1, WeightValuePair object2) {
    // Reversed argument order yields descending sort.
    return Double.compare(object2.value, object1.value);
  }
}

/**
 * Learns a linear state-value function for Tetris with the cross-entropy
 * method. Each epoch draws {@code numSamples} weight vectors from a
 * multivariate Gaussian, scores each by playing one full game greedily,
 * retains the top {@code rho} fraction, and refits the Gaussian's mean and
 * covariance to that elite set. After every epoch the mean weights are
 * evaluated over 30 games and the distribution is checkpointed to
 * "mean_variance_cross_entropy.txt". Runs forever; not thread-safe.
 */
public class CrossEntropy implements Runnable {
  private final int numSamples; // weight vectors sampled per epoch
  private final double rho;     // elite fraction retained per epoch

  // Sampling distribution: v = mean weight vector, M = covariance matrix.
  protected PVectorMatrix meanVariance;
  private MultivariateGaussian multivarGaussian;

  int nEpoch; // current training epoch, 1-based

  public CrossEntropy() {
    numSamples = 100;
    rho = 0.1;

    // The feature dimension is read off the empty starting state.
    int featureDim = getFeatureVector(new State()).length;
    meanVariance = new PVectorMatrix(featureDim);
    Arrays.fill(meanVariance.v.array, 0);
    for (int i = 0; i < featureDim; i++) {
      meanVariance.M.array[i][i] = 100.0; // broad diagonal prior
    }
    multivarGaussian = new MultivariateGaussian();
    nEpoch = 1;
  }

  /**
   * Returns a copy of {@code inputState} after applying the move with the
   * given index. The index is clamped into the legal-move range; a state
   * that has already lost is returned unchanged.
   */
  State forwardSimulate(State inputState, int action) {
    int move = Math.max(0, Math.min(action, inputState.legalMoves().length - 1));
    State outputState = new State(inputState);
    if (!inputState.hasLost())
      outputState.makeMove(move);
    return outputState;
  }

  /**
   * Extracts the feature vector of a state: per-column heights, the maximum
   * height, adjacent column height differences, and the number of holes
   * (empty cells below a column's top). Length is 2 * columns + 1.
   */
  double[] getFeatureVector(State inputState) {
    State featureState = new State(inputState);
    // Hoisted: the original re-called getTop() on every loop iteration.
    int[] top = featureState.getTop();
    int cols = top.length;
    // cols heights + 1 max height + (cols - 1) differences + 1 hole count
    double[] double_obs = new double[2 * cols + 1];
    int counter = 0;

    int max = 0;
    for (int i = 0; i < cols; i++) {
      double_obs[counter++] = (double) top[i];
      if (top[i] > max)
        max = top[i];
    }
    double_obs[counter++] = max;
    for (int i = 1; i < cols; i++)
      double_obs[counter++] = (double) (top[i] - top[i - 1]);

    // Holes: empty cells strictly below each column's top.
    int numHoles = 0;
    for (int i = 0; i < cols; i++)
      for (int j = top[i] - 1; j >= 0; j--)
        if (featureState.getField()[j][i] == 0)
          numHoles++;
    double_obs[counter++] = numHoles;
    return double_obs;
  }

  /** Draws one weight vector from the current sampling Gaussian. */
  double[] sampleWeight() {
    return multivarGaussian.drawRandomPoint(meanVariance).array;
  }

  /** Linear value: dot product of feature vector and weights. */
  double valueFunction(double[] feature, double[] valueWeights) {
    assert (feature.length == valueWeights.length);
    double value = 0;
    for (int i = 0; i < feature.length; i++)
      value += valueWeights[i] * feature[i];
    return value;
  }

  /** Value of the state reached by taking {@code action} in {@code inputState}. */
  double valueFunction(State inputState, int action, double[] valueWeights) {
    State outputState = forwardSimulate(inputState, action);
    return valueFunction(getFeatureVector(outputState), valueWeights);
  }

  /** Reward for a finished game: total rows cleared. */
  double getAccumulatedReward(State inputState) {
    return inputState.getRowsCleared();
  }

  /**
   * Plays one complete game, always taking the move whose successor state
   * maximizes the linear value function, and returns the rows cleared.
   */
  double evaluateWeight(double[] valueWeights) {
    State tetrisState = new State();
    while (!tetrisState.hasLost()) {
      double bestValue = Double.NEGATIVE_INFINITY;
      int bestAction = 0;
      for (int i = 0; i < tetrisState.legalMoves().length; i++) {
        double value = valueFunction(tetrisState, i, valueWeights);
        if (value > bestValue) {
          bestValue = value;
          bestAction = i;
        }
      }
      tetrisState.makeMove(bestAction);
    }
    return getAccumulatedReward(tetrisState);
  }

  /**
   * Cross-entropy training loop: sample, evaluate, select elites, refit the
   * Gaussian, report the mean weights' score, checkpoint to disk. Loops
   * forever; interrupt the thread to stop.
   */
  @Override
  public void run() {
    while (true) {
      // Sample weight vectors and score each with one full game.
      List<WeightValuePair> sampleList = new ArrayList<WeightValuePair>();
      for (int i = 0; i < numSamples; i++) {
        WeightValuePair pair = new WeightValuePair();
        pair.weight = sampleWeight();
        pair.value = evaluateWeight(pair.weight); // was a dead 1-iteration loop
        sampleList.add(pair);
      }
      Collections.sort(sampleList, new WeightValuePairComparator()); // best first

      // Refit the mean to the elite samples, stopping early at non-positive
      // scores (they carry no learning signal).
      int retain = (int) (rho * numSamples);
      double[] avg = new double[meanVariance.dim];
      double[][] var_avg = new double[meanVariance.dim][meanVariance.dim];

      int counter = 0;
      for (int i = 0; i < retain; i++) {
        if (sampleList.get(i).value <= 0)
          break;
        counter++;
        for (int p = 0; p < avg.length; p++) {
          avg[p] += sampleList.get(i).weight[p];
        }
      }
      if (counter > 0) {
        for (int p = 0; p < avg.length; p++) {
          meanVariance.v.array[p] = avg[p] / counter;
        }
      }

      // Refit the covariance only when the full elite set scored positively.
      // A constant added to the diagonal keeps exploration from collapsing.
      if (counter == retain) {
        for (int i = 0; i < retain; i++) {
          for (int p = 0; p < meanVariance.v.array.length; p++) {
            for (int q = 0; q < meanVariance.v.array.length; q++) {
              var_avg[p][q] += (sampleList.get(i).weight[p] - meanVariance.v.array[p])
                  * (sampleList.get(i).weight[q] - meanVariance.v.array[q]);
            }
          }
        }
        for (int p = 0; p < meanVariance.v.array.length; p++) {
          for (int q = 0; q < meanVariance.v.array.length; q++) {
            // Original computed 0.0*old + (1.0/retain)*var_avg; the 0.0 term
            // was a no-op and has been dropped.
            meanVariance.M.array[p][q] = var_avg[p][q] / retain;
            if (p == q)
              meanVariance.M.array[p][q] += 4; // diagonal exploration noise
          }
        }
      }

      // Report the mean weights' average score over 30 evaluation games.
      double mean_score = 0;
      for (int turn = 0; turn < 30; turn++)
        mean_score += evaluateWeight(meanVariance.v.array) / 30.0;
      System.out.println("Epoch: " + nEpoch + " Score: " + mean_score);

      nEpoch++;
      // try-with-resources: the original never closed the stream, leaking one
      // file handle per epoch inside an infinite loop.
      try (PrintStream output = new PrintStream(new File("mean_variance_cross_entropy.txt"))) {
        // First line: mean vector. Following lines: covariance matrix rows.
        for (int i = 0; i < meanVariance.v.array.length; i++) {
          output.print(meanVariance.v.array[i] + " ");
        }
        output.println(" ");
        for (int i = 0; i < meanVariance.v.array.length; i++) {
          for (int j = 0; j < meanVariance.v.array.length; j++) {
            output.print(meanVariance.M.array[i][j] + " ");
          }
          output.println(" ");
        }
      } catch (FileNotFoundException e) {
        e.printStackTrace();
      }
    }
  }
}