package test.kmeans;

import java.io.IOException;
import java.util.Random;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;

import cache.io.CacheInputFormat;

public class KMeansClusteringJobCache {

	public static final String PATH_DATA = "files/clustering/import/data";
	public static final String PATH_CENTERS = "files/clustering/import/center/cen.seq";

	private static final Log LOG = LogFactory.getLog(KMeansClusteringJobCache.class);

	/**
	 * Driver entry point: runs a single k-means iteration as a MapReduce job
	 * whose input is served through {@code CacheInputFormat} (which delegates
	 * the actual reading to {@code SequenceFileInputFormat}).
	 *
	 * @param args {@code args[0]} = k, the number of initial cluster centers
	 *             to sample from the input data
	 * @throws IOException            on HDFS access failure
	 * @throws InterruptedException   if the job is interrupted
	 * @throws ClassNotFoundException if a job class cannot be resolved
	 */
	public static void main(String[] args) throws IOException,
			InterruptedException, ClassNotFoundException {

		// Fail fast with a usage hint instead of an
		// ArrayIndexOutOfBoundsException when k is missing.
		if (args.length < 1) {
			System.err.println("Usage: KMeansClusteringJobCache <k>");
			System.exit(2);
		}
		final int k = Integer.parseInt(args[0]);

		int iteration = 1;
		Configuration conf = new Configuration();
		conf.set("num.iteration", String.valueOf(iteration));

		Path in = new Path(PATH_DATA);
		Path center = new Path(PATH_CENTERS);
		conf.set("centroid.path", center.toString());
		Path out = new Path("files/clustering/depth_1");

		// NOTE: new Job(conf) is deprecated in Hadoop 2.x in favor of
		// Job.getInstance(conf); kept here for compatibility with the
		// Hadoop version the rest of this project targets.
		Job job = new Job(conf);
		job.setJobName("KMeans Clustering");

		job.setMapperClass(KMeansMapper.class);
		job.setReducerClass(KMeansReducer.class);
		job.setJarByClass(KMeansMapper.class);

		SequenceFileInputFormat.addInputPath(job, in);
		FileSystem fs = FileSystem.get(conf);

		// Start from a clean slate: stale output would make the job fail,
		// stale centers would skew the initialization below.
		if (fs.exists(out))
			fs.delete(out, true);
		if (fs.exists(center))
			fs.delete(center, true);

		initCenter(conf, in, center, fs, k);

		SequenceFileOutputFormat.setOutputPath(job, out);
		// Replaces job.setInputFormatClass(SequenceFileInputFormat.class):
		// CacheInputFormat wraps the sequence-file input so the data can be
		// cached between tasks.
		CacheInputFormat.setDelegateInputFormatData(job, SequenceFileInputFormat.class, PATH_DATA);
		job.setInputFormatClass(CacheInputFormat.class);
		job.setOutputFormatClass(SequenceFileOutputFormat.class);

		job.setOutputKeyClass(ClusterCenter.class);
		job.setOutputValueClass(Vector.class);
		// FileOutputFormat.setCompressOutput(job, true);
		// FileOutputFormat.setOutputCompressorClass(job, LzoCodec.class);

		final boolean success = job.waitForCompletion(true);

		long converged = job.getCounters().findCounter(
				KMeansReducer.Counter.CONVERGED).getValue();
		LOG.info("Iteration " + iteration + " finished, CONVERGED counter = "
				+ converged);
		printCenters(conf);

		// Earlier revisions of this driver looped here, re-submitting jobs
		// with "files/clustering/depth_<n>" outputs until the CONVERGED
		// counter dropped to 0, then dumped the final cluster assignments.
		// This cache-backed variant deliberately runs a single iteration.

		// Propagate the job outcome so callers/scripts can detect failure.
		System.exit(success ? 0 : 1);
	}

	/**
	 * Writes a tiny hard-coded test data set: two initial centers to
	 * {@code center} and nine 2-D vectors (keyed by a dummy zero-center)
	 * to {@code in}. Currently unused (kept for local experiments).
	 *
	 * @param conf   Hadoop configuration
	 * @param in     destination sequence file for the data vectors
	 * @param center destination sequence file for the initial centers
	 * @param fs     file system to write to
	 * @throws IOException on write failure
	 */
	private static void writeData(Configuration conf, Path in, Path center,
			FileSystem fs) throws IOException {

		final SequenceFile.Writer centerWriter = SequenceFile.createWriter(fs,
				conf, center, ClusterCenter.class, IntWritable.class);

		final IntWritable value = new IntWritable(0);
		centerWriter.append(new ClusterCenter(new Vector(1, 1)), value);
		centerWriter.append(new ClusterCenter(new Vector(5, 5)), value);
		centerWriter.close();

		final SequenceFile.Writer dataWriter = SequenceFile.createWriter(fs,
				conf, in, ClusterCenter.class, Vector.class);

		// Each record is (dummy center, data vector); the mapper assigns the
		// real center during the first iteration.
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(1, 2));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(16, 3));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(3, 3));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(2, 2));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(2, 3));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(25, 1));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(7, 6));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(6, 5));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(-1, -23));
		dataWriter.close();
	}

	/**
	 * Samples k initial cluster centers from the data file by skipping a
	 * random number (0..999) of records before taking each one, and writes
	 * them to {@code center}. Uses a fixed seed so runs are reproducible.
	 *
	 * @param conf   Hadoop configuration
	 * @param in     sequence file containing the data vectors
	 * @param center destination sequence file for the sampled centers
	 * @param fs     file system to read/write
	 * @param k      number of centers to sample
	 * @throws IOException on read/write failure
	 */
	private static void initCenter(Configuration conf, Path in, Path center,
			FileSystem fs, int k) throws IOException {

		SequenceFile.Reader reader = new SequenceFile.Reader(fs, in, conf);
		final SequenceFile.Writer centerWriter = SequenceFile.createWriter(fs,
				conf, center, ClusterCenter.class, IntWritable.class);
		// Fixed seed for reproducible center initialization; switch to a
		// time-based seed for production sampling.
		Random generator = new Random(123456789);
		final IntWritable writeValue = new IntWritable(0);

		ClusterCenter readKey = new ClusterCenter();
		Vector readValue = new Vector();
		int i = 0;
		while (i < k) {
			for (int j = 0, rnd = generator.nextInt(1000); j < rnd; j++) {
				if (!reader.next(readKey, readValue)) {
					// End of file reached mid-skip: wrap around to the start
					// so we never hand out a stale duplicate of the last
					// record (the old code ignored next()'s return value and
					// did exactly that once the file was exhausted).
					reader.close();
					reader = new SequenceFile.Reader(fs, in, conf);
					reader.next(readKey, readValue);
				}
			}
			ClusterCenter writeKey = new ClusterCenter(readValue);
			centerWriter.append(writeKey, writeValue);
			LOG.info("CENTER " + i + ": " + writeKey.toString());
			i++;
		}

		reader.close();
		centerWriter.close();
	}

	/**
	 * Logs every cluster center currently stored in the sequence file named
	 * by the {@code centroid.path} configuration key. Read failures are
	 * logged and swallowed — this is a best-effort diagnostic dump.
	 *
	 * @param conf configuration holding {@code centroid.path}
	 */
	private static void printCenters(Configuration conf) {
		Path centroids = new Path(conf.get("centroid.path"));
		try {
			FileSystem fs = FileSystem.get(conf);

			SequenceFile.Reader reader = new SequenceFile.Reader(fs, centroids,
					conf);
			ClusterCenter key = new ClusterCenter();
			IntWritable value = new IntWritable();
			int i = 0;
			while (reader.next(key, value)) {
				LOG.info("CENTER " + i + ": " + key.toString());
				i++;
			}
			reader.close();
		} catch (IOException e) {
			// Best-effort diagnostics: log with the cause instead of the
			// former printStackTrace().
			LOG.error("Failed to read centers from " + centroids, e);
		}
	}
}
