package tetris.agent;

import java.util.*;

import tetris.simulator.State;
import tetris.features.*;
import jMEF.*;

import java.util.List;
import java.io.*;

// Building block of cross entropy: pairs a candidate weight vector with the
// score it earned when evaluated (rows cleared over a full simulated game).
class WeightValuePair {
  public double[] weight; // candidate feature weights drawn from the Gaussian
  public double value;    // score achieved by playing greedily with `weight`
}

// Orders WeightValuePair objects by descending value, so the best samples
// come first after sorting.
class WeightValuePairComparator implements Comparator<WeightValuePair> {
  @Override
  public int compare(WeightValuePair object1, WeightValuePair object2) {
    // Double.compare returns 0 for equal values. The original implementation
    // never returned 0, which violates the Comparator contract and can make
    // Collections.sort (TimSort) throw "Comparison method violates its
    // general contract!". Arguments are swapped to keep descending order.
    return Double.compare(object2.value, object1.value);
  }
}

/**
 * Tetris agent trained with the cross-entropy method: maintains a diagonal
 * multivariate Gaussian over linear value-function weights, repeatedly samples
 * candidate weight vectors, evaluates each by playing a full game, and refits
 * the Gaussian to the elite fraction of samples.
 */
public class CrossEntropyAgent extends Agent{
  private final int numSamples; // candidate weight vectors sampled per epoch
  private final double rho;     // elite fraction retained when refitting

  // Sampling distribution: mean vector (v) and covariance matrix (M).
  protected PVectorMatrix meanVariance;
  private MultivariateGaussian multivarGaussian;
  Feature feature;              // feature extractor applied to successor states

  int nEpoch;                   // current training epoch, 1-based
  String filename;              // file used to persist/restore mean and covariance

  /**
   * Creates an agent over the given feature set. If {@code filename} already
   * exists, the Gaussian's mean and covariance are restored from it; otherwise
   * the distribution starts at mean 0 with variance 100 per dimension.
   *
   * @param feature  feature extractor used to evaluate successor states
   * @param filename path used to save/load the sampling distribution
   */
  public CrossEntropyAgent(Feature feature, String filename) {
    numSamples = 100;
    rho = 0.1;

    this.feature = feature;
    // Feature dimension is probed with a fresh (empty) board state.
    int featureDim = feature.getFeatureVector(new State()).size();
    meanVariance = new PVectorMatrix(featureDim);
    Arrays.fill(meanVariance.v.array, 0);
    for (int i = 0; i < featureDim; i++) {
      meanVariance.M.array[i][i] = 100.0; // broad initial variance for exploration
    }
    multivarGaussian = new MultivariateGaussian();
    nEpoch = 1;
    this.filename = filename;

    try {
      File f = new File(filename);
      if (f.exists()) {
        System.out.println("Loading mean variance from file");
        LoadMeanVariance();
      } else {
        System.out.println("File doesnt exist - will create new");
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  // Returns a copy of inputState after applying the given legal-move index
  // (clamped into the valid range); a lost state is returned unchanged.
  State forwardSimulate(State inputState, int action) {
    int move = Math.max(0, Math.min(action, inputState.legalMoves().length - 1));
    State outputState = new State(inputState);
    if (!inputState.hasLost())
      outputState.makeMove(move);
    return outputState;
  }

  // Draws one candidate weight vector from the current Gaussian.
  double[] sampleWeight() {
    return multivarGaussian.drawRandomPoint(meanVariance).array;
  }

  // Linear value function: dot product of a feature vector and the weights.
  double valueFunction(ArrayList<Double> feature, double[] valueWeights) {
    assert(feature.size() == valueWeights.length);
    double value = 0;
    for (int i = 0; i < feature.size(); i++)
      value += valueWeights[i] * feature.get(i).doubleValue();
    return value;
  }

  // Value of taking `action` in `inputState`, evaluated on the successor state.
  double valueFunction(State inputState, int action, double[] valueWeights) {
    State outputState = forwardSimulate(inputState, action);
    return valueFunction(feature.getFeatureVector(outputState), valueWeights);
  }

  // Episode return: total rows cleared when the game ended.
  double getAccumulatedReward(State inputState) {
    return inputState.getRowsCleared();
  }

  // Plays one full game greedily under the given weights and returns the score.
  double evaluateWeight(double[] valueWeights) {
    State tetrisState = new State();
    while (!tetrisState.hasLost()) {
      double bestValue = Double.NEGATIVE_INFINITY;
      int bestAction = 0;
      for (int i = 0; i < tetrisState.legalMoves().length; i++) {
        double value = valueFunction(tetrisState, i, valueWeights);
        if (value > bestValue) {
          bestValue = value;
          bestAction = i;
        }
      }
      tetrisState.makeMove(bestAction);
    }
    return getAccumulatedReward(tetrisState);
  }

  /**
   * Writes the mean vector (first line) and the full covariance matrix (one
   * row per line) to {@code filename}, space-separated. Doubles are written
   * via String concatenation, so the decimal separator is always '.'.
   */
  public void SaveMeanVariance() {
    // try-with-resources closes the stream even if a write fails mid-way
    // (the original leaked the PrintStream on any exception before close()).
    try (PrintStream output = new PrintStream(new File(filename))) {
      for (int i = 0; i < meanVariance.v.array.length; i++) {
        output.print(meanVariance.v.array[i]+" ");
      }
      output.println(" ");
      for (int i = 0; i < meanVariance.v.array.length; i++) {
        for (int j = 0; j < meanVariance.v.array.length; j++) {
          output.print(meanVariance.M.array[i][j]+" ");
        }
        output.println(" ");
      }
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    }
  }

  /**
   * Restores the mean vector and covariance matrix previously written by
   * {@link #SaveMeanVariance()} from {@code filename}.
   */
  public void LoadMeanVariance() {
    File file = new File(filename);
    // try-with-resources closes the Scanner (the original never closed it).
    try (Scanner scan = new Scanner(file)) {
      // The save format always uses '.' as the decimal separator; force a
      // locale-independent parse so loading works under comma-decimal locales.
      scan.useLocale(Locale.ROOT);
      for (int i = 0; i < meanVariance.v.array.length; i++) {
        meanVariance.v.array[i] = scan.nextDouble();
      }
      for (int i = 0; i < meanVariance.v.array.length; i++) {
        for (int j = 0; j < meanVariance.v.array.length; j++) {
          meanVariance.M.array[i][j] = scan.nextDouble();
        }
      }
    } catch (FileNotFoundException e1) {
      e1.printStackTrace();
    }
  }

  /**
   * Runs cross-entropy optimization indefinitely: sample weight vectors,
   * score each with one game, refit mean/variance to the elite fraction,
   * persist the distribution, and print the mean score of the new mean.
   */
  public void learn() {
    while (true) {
      // Sample and evaluate candidate weight vectors.
      List<WeightValuePair> sampleList = new ArrayList<WeightValuePair>();
      for (int i = 0; i < numSamples; i++) {
        WeightValuePair pair = new WeightValuePair();
        pair.weight = sampleWeight();
        // One evaluation game per sample (the original's `for turn < 1` loop
        // accumulating evaluateWeight(...)/1.0 reduces to exactly this).
        pair.value = evaluateWeight(pair.weight);
        sampleList.add(pair);
      }
      // Sort best-first (descending score).
      Collections.sort(sampleList, new WeightValuePairComparator());

      // Refit the mean to the elite samples that actually cleared rows.
      int retain = (int)(rho*numSamples);
      double[] avg = new double[meanVariance.dim];
      double[][] var_avg = new double[meanVariance.dim][meanVariance.dim];

      int counter = 0;
      for (int i = 0; i < retain; i++) {
        if (sampleList.get(i).value <= 0)
          break; // list is sorted, so every later sample scored <= 0 as well
        counter++;
        for (int p = 0; p < avg.length; p++) {
          avg[p] += sampleList.get(i).weight[p];
        }
      }
      if (counter > 0) {
        for (int p = 0; p < avg.length; p++) {
          meanVariance.v.array[p] = avg[p]/counter;
        }
      }

      // Refit the diagonal covariance only when the full elite set scored,
      // adding a constant noise floor (+4) to keep exploration alive.
      if (counter == retain) {
        for (int i = 0; i < retain; i++) {
          for (int p = 0; p < meanVariance.v.array.length; p++) {
            double dev = sampleList.get(i).weight[p] - meanVariance.v.array[p];
            var_avg[p][p] += dev * dev;
          }
        }
        for (int p = 0; p < meanVariance.v.array.length; p++) {
          // The original multiplied the old variance by 0.0 — a dead term,
          // removed here; the update is purely the elite sample variance.
          meanVariance.M.array[p][p] = (1.0/retain)*var_avg[p][p] + 4;
        }
      }

      SaveMeanVariance();

      // Report the distribution mean's performance averaged over 30 games.
      double mean_score = 0;
      for (int turn = 0; turn < 30; turn ++)
        mean_score += evaluateWeight(meanVariance.v.array)/30.0;
      System.out.println(nEpoch+" "+mean_score);

      nEpoch++;
    }
  }

  /**
   * Greedy action selection using the current mean weights.
   * Note: moves are enumerated via s.legalMoves(); the legalMoves argument is
   * part of the Agent interface and carries the same data.
   */
  public int chooseAction(State s, int[][] legalMoves) {
    double bestValue = Double.NEGATIVE_INFINITY;
    int bestAction = 0;
    for (int i = 0; i < s.legalMoves().length; i++) {
      double value = valueFunction(s, i, meanVariance.v.array);
      if (value > bestValue) {
        bestValue = value;
        bestAction = i;
      }
    }
    return bestAction;
  }
}