package org.apache.giraph.benchmark;

/*
 * Random Forest: a collection of unpruned decision trees used for
 * classification. Each vertex in the Giraph graph plays one role —
 * representative ("R"), training ("TR"), testing, or classifier ("C") —
 * and compute() dispatches on that role.
 *
 * NOTE: the per-role compute() behavior is still a work in progress;
 * the forest-generation helpers below are currently disabled.
 */

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Random;
import java.util.StringTokenizer;

import net.sf.javaml.classification.Classifier;
import net.sf.javaml.core.Dataset;
import net.sf.javaml.core.DefaultDataset;
import net.sf.javaml.core.DenseInstance;
import net.sf.javaml.core.Instance;
import net.sf.javaml.tools.weka.WekaClassifier;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.giraph.graph.BasicVertex;
import org.apache.giraph.graph.Edge;
import org.apache.giraph.graph.GiraphJob;
import org.apache.giraph.graph.MutableVertex;
import org.apache.giraph.graph.Vertex;
import org.apache.giraph.graph.VertexReader;
import org.apache.giraph.graph.VertexWriter;
import org.apache.giraph.lib.TextVertexInputFormat;
import org.apache.giraph.lib.TextVertexOutputFormat;
import org.apache.giraph.lib.TextVertexInputFormat.TextVertexReader;
import org.apache.giraph.lib.TextVertexOutputFormat.TextVertexWriter;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import weka.classifiers.trees.J48;

public class RandomForestVertex extends
Vertex<LongWritable, DoubleWritable, FloatWritable, MapWritable>
implements Tool {

	/* How many supersteps to run */
	public static String ATTRIBUTE_COUNT = "RandomForestVertex.attributeCount";

	/* How many classification trees to create */
	public static String FOREST_SIZE = "RandomForestVertex.forestSize";
	
	/* Size of training data set */
	public static String TRAINING_DATA_CASES = "RandomForestVertex.trainingDataCases";
	
	/* Type of vertex: Representative, Training, Testing, Classifier */
	private String vertexType;
	
	private MapWritable vertexData = new MapWritable();
	
	public void setVertextType(String type) {
		this.vertexType = type;
	}
	
	public void addVertexDataEntry(Writable key, Writable value) {
		this.vertexData.put(key, value);
	}
	
	/* Random forest generation 
	public static ArrayList<ArrayList<Integer>> 
	trainingSubsetWithReplacement(ArrayList<ArrayList<Integer>> data) {
		
		ArrayList<ArrayList<Integer>> subset = new ArrayList<ArrayList<Integer>>();
		
		Random r = new Random();
		for (int i = 0; i < data.size(); i++) {
			subset.add(data.get(r.nextInt(data.size())));
		}
		
		return subset;
	}
	
	public static Dataset classifierDataFormat (ArrayList<ArrayList<Integer>> data) {
		Dataset d = new DefaultDataset();
		for (ArrayList<Integer> values : data) {
			int result = values.get(0);
			double[] valueset = new double[values.size() - 1];
			
			int pos = 0;
			for (int i = 1; i < values.size(); i++) {
				valueset[pos] = (double)values.get(i);
				pos++;
			}
			if (valueset.length != 22)
				System.out.println(valueset.length);
			Instance instance = new DenseInstance(valueset, result);
			d.add(instance);
		}
		
		return d;
	}
	
	public static Classifier buildDecisionTree (Dataset data) {
		// Initialize new Weka C45 tree
		J48 tree = new J48();
		tree.setUnpruned(true);
		
		// Create java-ml Classifier Bridge
		Classifier bridge = new WekaClassifier(tree);
		
		// Build classifier with dataset
		bridge.buildClassifier(data);
		
		// Return classifier
		return bridge;
	}
	End random forest generation */
	
	@Override
	public void compute(Iterator<MapWritable> msgIterator) throws IOException {
		
		/* Start representative vertex behavior */
		if (this.vertexType == "R") { 
			// Placeholder
			voteToHalt();
		}
		/* End representative vertex behavior */
		
		/* Start classifier vertex behavior */
		if (this.vertexType == "C") {
			// Placeholder
			voteToHalt();
		}
		/* End classifier vertex behavior */
		
		/* Start training vertex behavior */
		if (this.vertexType == "TR") {
			
		}
		/* End training vertex behavior */
		
		/* 
	}

	@Override
	public int run(String[] args) throws Exception {
		Options options = new Options();
		options.addOption("h", "help", false, "Help");
		options.addOption("w",
				"workers",
				true,
		"Number of workers");
		options.addOption("i",
				"input file",
				true,
		"Input data file");
		options.addOption("o",
				"output",
				true,
		"Output file");
		options.addOption("n",
				"training cases",
				true,
		"Number of training cases");
		options.addOption("m",
				"attributes",
				true,
		"Number of classification attributes");
		options.addOption("f",
				"trees",
				true,
		"Number of trees in forest");
		
		HelpFormatter formatter = new HelpFormatter();
		if (args.length == 0) {
			formatter.printHelp(getClass().getName(), options, true);
			return 0;
		}
		CommandLineParser parser = new PosixParser();
		CommandLine cmd = parser.parse(options, args);
		if (cmd.hasOption('h')) {
			formatter.printHelp(getClass().getName(), options, true);
			return 0;
		}
		if (!cmd.hasOption('w')) {
			System.out.println("Need to choose the number of workers (-w)");
			return -1;
		}
		if (!cmd.hasOption('i')) {
			System.out.println("Need to set the input training file (-i)");
			return -1;
		}
		if (!cmd.hasOption('n')) {
			System.out.println("Need to set the number of training cases (-n)");
			return -1;
		}
		if (!cmd.hasOption('o')) {
			System.out.println("Need to set the output file (-o)");
			return -1;
		}
		if (!cmd.hasOption('m')) {
			System.out.println("Need to set the number of attributes (-m)");
			return -1;
		}
		if (!cmd.hasOption('f')) {
			System.out.println("Need to choose the size of the forest (-f)");
			return -1;
		}

		// Obtain parameters for forest construction
		int number_of_attributes = Integer.parseInt(cmd.getOptionValue('m'));
		int forest_size = Integer.parseInt(cmd.getOptionValue('f'));
		int training_cases = Integer.parseInt(cmd.getOptionValue('n'));
		int workers = Integer.parseInt(cmd.getOptionValue('w'));
		
		// Create giraph job
		GiraphJob job = new GiraphJob(getConf(), getClass().getName());
		job.setVertexClass(getClass());
		job.setVertexInputFormatClass(RandomForestVertexInputFormat.class);
		job.setVertexOutputFormatClass(RandomForestVertexOutputFormat.class);
		FileInputFormat.addInputPath(job, new Path(cmd.getOptionValue('i')));
		FileOutputFormat.setOutputPath(job, new Path(cmd.getOptionValue('o')));
		job.setWorkerConfiguration(workers, workers, 100.0f);

		// Set Random Forest Job Options
		job.getConfiguration().setInt(ATTRIBUTE_COUNT, number_of_attributes);
		job.getConfiguration().setInt(FOREST_SIZE, forest_size);
		job.getConfiguration().setInt(TRAINING_DATA_CASES, training_cases);

		if (job.run(true) == true) {
			return 0;
		} else {
			return -1;
		}
	}
	
	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new RandomForestVertex(), args));
	}
	
	public static class RandomForestVertexReader extends
	TextVertexReader<LongWritable, DoubleWritable, FloatWritable> {

		// Instances read so far
		private long verticesRead;
		
		public RandomForestVertexReader(
				RecordReader<LongWritable, Text> lineRecordReader) {
			super(lineRecordReader);
		}

		@Override
		public boolean next(
				MutableVertex<LongWritable, DoubleWritable, FloatWritable, ?> vertex)
				throws IOException, InterruptedException {
			
			/*while((line = train.readLine()) != null) {
				
				StringTokenizer st = new StringTokenizer(line, ",");
				ArrayList<Integer> t = new ArrayList<Integer>();
				
				while (st.hasMoreElements()) {
					t.add(Integer.decode(st.nextToken()));
				}
				
				TrainingSet.add(t);
			}
			
			System.out.println("Reading testing data...");
			BufferedReader test = new BufferedReader(new FileReader("/home/genia/Datasets/Spect/SPECT.test"));
			while((line = test.readLine()) != null) {
				
				StringTokenizer st = new StringTokenizer(line, ",");
				ArrayList<Integer> t = new ArrayList<Integer>();
				
				while (st.hasMoreElements()) {
					t.add(Integer.decode(st.nextToken()));
				}
				
				TestingSet.add(t);
			}
			
			System.out.println("Generating Forests...");
			for (int i = 0; i < 10; i++)
			{
				ArrayList<ArrayList<Integer>> s = trainingSubsetWithReplacement(TrainingSet);
				Dataset d = classifierDataFormat(s);
				Classifier classify = buildDecisionTree(d);
			}
		
			System.out.println("Done");
			
			
			if (!getRecordReader().nextKeyValue()) {
				return false;
			}*/
			
			if(verticesRead == 0) {
				
				// Create a representative vertex which will be connected to all forest vertices
				vertex.setVertexId(new LongWritable(-1L));
				vertex.setVertexValue(new DoubleWritable(0.0d));
				((RandomForestVertex)vertex).setVertextType("R");
				verticesRead++;
				
				return true;
			}
			
			Text line = getRecordReader().getCurrentValue();
			try {
				
				// Read values from input data
				StringTokenizer tokenizer = new StringTokenizer(line.toString(), ",");
				vertex.setVertexId(new LongWritable(verticesRead));
				vertex.setVertexValue(new DoubleWritable(0.0d));
				
				/*FloatWritable[] points = new FloatWritable[tokenizer.countTokens()];

				// 
				int counter = 0;
				while (tokenizer.hasMoreTokens()) {
					points[counter++] = new FloatWritable(Float.parseFloat(tokenizer.nextToken()));
				}

				// 
				((KMeansVertex)vertex).setDatapoints(points);
				((KMeansVertex)vertex).setVertextType("D");
				// from D to R
				vertex.addEdge(new LongWritable(-1L), new FloatWritable(3.0f));
				// from R to D
				vertex.addEdgeRequest(new LongWritable(-1L),
						new Edge<LongWritable, FloatWritable>(
								vertex.getVertexId(), new FloatWritable(1.0f)));*/

				verticesRead++;

			} catch (Exception e) {
				throw new IllegalArgumentException(
						"next: Couldn't get data point from line " + line, e);
			}
			return true;
		}
	}
	
	public static class RandomForestVertexInputFormat extends 
	TextVertexInputFormat<LongWritable, DoubleWritable, FloatWritable> {

		@Override
		public VertexReader<LongWritable, DoubleWritable, FloatWritable> createVertexReader(
				InputSplit split, TaskAttemptContext context) throws IOException {
			return new RandomForestVertexReader(textInputFormat.createRecordReader(split, context));
		}
	}
	
	public static class RandomForestVertexWriter extends
	TextVertexWriter<LongWritable, DoubleWritable, FloatWritable> {

		public RandomForestVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
			super(lineRecordWriter);
		}

		@Override
		public void writeVertex(
				BasicVertex<LongWritable, DoubleWritable, FloatWritable, ?> vertex)
				throws IOException, InterruptedException {		
			RandomForestVertex currentVertex = (RandomForestVertex)vertex;
			String outputStr = currentVertex.getVertexId().toString() + " :";
			if(currentVertex.vertexType == "T") {
				outputStr += "I AM A TREE";
				getRecordWriter().write(new Text(outputStr), null);
				
				// FIXME
			}
		}
		
	}
	
	public static class RandomForestVertexOutputFormat extends
	TextVertexOutputFormat<LongWritable, DoubleWritable, FloatWritable> {

		@Override
		public VertexWriter<LongWritable, DoubleWritable, FloatWritable> createVertexWriter(
				TaskAttemptContext context) throws IOException,
				InterruptedException {
			RecordWriter<Text, Text> recordWriter = textOutputFormat.getRecordWriter(context);
			return new RandomForestVertexWriter(recordWriter);
		}
	}

}
