package cgl.mr.hadoop.kmeans;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Driver for an iterative K-means clustering computation on Hadoop.
 *
 * <p>The driver splits the input vectors into one SequenceFile per map task,
 * writes the initial centroids to {@code centroid_0}, and then runs MapReduce
 * jobs in a loop. Iteration {@code i} ships {@code centroid_i} to the mappers
 * via the distributed cache; the (single) reducer is expected to write the
 * next centroids file {@code centroid_(i+1)} and the aggregate error to
 * {@code out-i/reduce-out}. Iterations stop once the error drops to the
 * threshold.
 */
public class IterativeMapReduce extends Configured implements Tool {

	/** Maximum number of centroids read from the initial-centroids file. */
	private static final int NUM_CENTROIDS = 100;

	/** Iterations stop once the reducer-reported error is no greater than this. */
	private static final double ERROR_THRESHOLD = 1.0;

	/**
	 * Runs the full iterative K-means computation.
	 *
	 * @param numMapTasks  number of data partitions / map tasks
	 * @param dataFile     local path to the space-separated input vectors
	 * @param clustersFile local path to the initial centroids (space-separated)
	 * @param jt           job tracker address, or {@code null} to use the configured one
	 * @param dfs          default file system URI, or {@code null} to use the configured one
	 * @return the final error reported by the last reducer
	 * @throws IOException        on any HDFS read/write failure
	 * @throws URISyntaxException if the distributed-cache URI cannot be built
	 */
	double launch(int numMapTasks, String dataFile, String clustersFile, String jt, String dfs)
			throws IOException, URISyntaxException {

		String initCentroidData = clustersFile;

		// Optionally point the configuration at an explicit job tracker / DFS.
		Configuration configuration = getConf();
		if (jt != null) {
			configuration.set("mapred.job.tracker", jt);
		}
		if (dfs != null) {
			FileSystem.setDefaultUri(configuration, dfs);
		}

		// Working directory in the (distributed) file system; wipe leftovers
		// from any previous run before staging new input.
		Path testDir = new Path("test-my-k");
		FileSystem fs = FileSystem.get(configuration);
		fs.delete(testDir, true);

		Path vDir = new Path(testDir, "data");
		if (!fs.mkdirs(vDir)) {
			throw new IOException("Mkdirs failed to create " + vDir.toString());
		}

		// Split the input vectors into one SequenceFile per map task.
		double data[][] = loadDataFromFile(dataFile);
		int len = data.length;
		int perFile = len / numMapTasks;
		System.out.println("Writing " + perFile + " vectors to a file");

		for (int k = 0; k < numMapTasks; k++) {
			int startPos = k * perFile;
			// The last split absorbs the remainder when len is not evenly divisible.
			int endPos = (k == numMapTasks - 1) ? len : (k + 1) * perFile;

			Path vFile = new Path(vDir, "data_" + k);
			SequenceFile.Writer vWriter = SequenceFile.createWriter(fs, configuration, vFile,
					IntWritable.class, Text.class, CompressionType.NONE);
			try {
				// NOTE: only the first two components of each vector are written (2-D data).
				for (int i = startPos; i < endPos; i++) {
					vWriter.append(new IntWritable(i), new Text(data[i][0] + " " + data[i][1]));
				}
			} finally {
				vWriter.close();
			}
			System.out.println("Wrote data to file " + k);
		}

		Path cDir = new Path(testDir, "centroids");
		if (!fs.mkdirs(cDir)) {
			throw new IOException("Mkdirs failed to create " + cDir.toString());
		}

		// Stage the initial centroids as centroid_0. Clamp the count so a
		// centroids file shorter than NUM_CENTROIDS lines cannot cause an
		// ArrayIndexOutOfBoundsException.
		data = loadDataFromFile(initCentroidData);
		int centroidCount = Math.min(NUM_CENTROIDS, data.length);
		Path initClustersFile = new Path(cDir, "centroid_0");
		SequenceFile.Writer cWriter = SequenceFile.createWriter(fs, configuration, initClustersFile,
				IntWritable.class, V2DDataWritable.class, CompressionType.NONE);
		try {
			for (int i = 0; i < centroidCount; i++) {
				cWriter.append(new IntWritable(i), new V2DDataWritable(data[i][0], data[i][1]));
			}
		} finally {
			cWriter.close();
		}
		System.out.println("Wrote centroids data to file");

		// ========================= Iterations =========================
		System.out.println("Starting Job");
		long startTime = System.currentTimeMillis();
		double error = 0.0;
		int count = 0;
		do {
			Job job = new Job(configuration, "test-kmeans" + count);
			Configuration jobConfig = job.getConfiguration();
			if (jt != null) {
				jobConfig.set("mapred.job.tracker", jt);
			}
			if (dfs != null) {
				FileSystem.setDefaultUri(jobConfig, dfs);
			}

			Path outDir = new Path(testDir, "out-" + String.valueOf(count));

			FileInputFormat.setInputPaths(job, vDir);
			FileOutputFormat.setOutputPath(job, outDir);

			// Ship this iteration's centroids to every mapper through the
			// distributed cache, exposed via a symlink named "centroid_<count>".
			Path cFile = new Path(cDir, "centroid_" + count);
			URI cFileURI = new URI(cFile.toString() + "#" + "centroid_" + count);
			DistributedCache.addCacheFile(cFileURI, jobConfig);
			DistributedCache.createSymlink(jobConfig);
			jobConfig.set("c-file", "centroid_" + count);
			jobConfig.set("iteration", String.valueOf(count));

			job.setInputFormatClass(SequenceFileInputFormat.class);
			job.setOutputKeyClass(IntWritable.class);
			job.setOutputValueClass(Text.class);
			job.setOutputFormatClass(SequenceFileOutputFormat.class);

			job.setJarByClass(IterativeMapReduce.class);
			job.setMapperClass(KmeansMapper.class);
			job.setReducerClass(KmeansReducer.class);

			// A single reducer aggregates all partial sums and emits the error
			// (and, presumably, the next centroids file — see KmeansReducer).
			job.setNumReduceTasks(1);

			try {
				if (!job.waitForCompletion(true)) {
					throw new IOException("K-means iteration " + count + " failed");
				}

				// The reducer writes the aggregate error as the first record
				// of "reduce-out".
				Path inFile = new Path(outDir, "reduce-out");
				SequenceFile.Reader reader = new SequenceFile.Reader(fs, inFile, jobConfig);
				try {
					IntWritable key = new IntWritable();
					Text txtError = new Text();
					reader.next(key, txtError);
					error = Double.valueOf(txtError.toString());
				} finally {
					reader.close();
				}
				System.out.println("Error =" + error);
			} catch (Exception e) {
				// Best-effort: log and keep iterating. Note that `error`
				// retains its previous value in this case.
				e.printStackTrace();
			}

			if (error > ERROR_THRESHOLD) {
				// This iteration's centroids are consumed; free the space.
				fs.delete(cFile, true);
			}
			count++;
		} while (error > ERROR_THRESHOLD);
		// ======================= End iterations =======================

		System.out.println("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");
		System.out.println("Number of iterations = " + count);
		return error;
	}

	/**
	 * Parses the command line and launches the computation.
	 *
	 * <p>Usage: {@code IterativeMapReduce <data file> <clusters file> <number of map tasks>}
	 *
	 * @return 0 on success, -1 on bad usage
	 */
	public int run(String[] args) throws Exception {
		// Three positional arguments are required. (The previous check of
		// args.length < 2 let args[2] below throw ArrayIndexOutOfBoundsException.)
		if (args.length < 3) {
			System.err.println("Usage: IterativeMapReduce <data file> <clusters file> <number of map tasks> ");
			ToolRunner.printGenericCommandUsage(System.err);
			return -1;
		}

		String dataFile = args[0];
		String clusterFile = args[1];
		int numMapTasks = Integer.parseInt(args[2]);

		System.out.println(" Number of Map Tasks = " + numMapTasks);
		System.out.println("Final Error is " + launch(numMapTasks, dataFile, clusterFile, null, null));

		return 0;
	}

	/** Standard {@link ToolRunner} entry point. */
	public static void main(String[] argv) throws Exception {
		int res = ToolRunner.run(new Configuration(), new IterativeMapReduce(),
				argv);
		System.exit(res);
	}

	/**
	 * Loads a whitespace-separated numeric matrix from a local file.
	 *
	 * <p>Each line becomes one row; the column count is taken from the first
	 * line. An empty file yields a zero-row array instead of the previous
	 * NullPointerException. The file is read twice (count, then parse) and the
	 * readers are always closed, even when parsing fails.
	 *
	 * @param fileName local path of the file to read
	 * @return a {@code numRecords x dimensions} matrix of parsed values
	 * @throws IOException           if the file cannot be read
	 * @throws NumberFormatException if a token is not a valid double
	 */
	public static double[][] loadDataFromFile(String fileName)
			throws IOException {

		File file = new File(fileName);

		// First pass: count the rows and take the dimensionality from line 1.
		int numRecords = 0;
		int dimensions = 0;
		BufferedReader reader = new BufferedReader(new FileReader(file));
		try {
			String line;
			while ((line = reader.readLine()) != null) {
				if (numRecords == 0) {
					dimensions = line.split(" ").length;
				}
				numRecords++;
			}
		} finally {
			reader.close();
		}

		double data[][] = new double[numRecords][dimensions];

		// Second pass: parse every token into the matrix.
		reader = new BufferedReader(new FileReader(file));
		try {
			String line;
			int row = 0;
			while ((line = reader.readLine()) != null) {
				String[] vectorValues = line.split(" ");
				for (int i = 0; i < vectorValues.length; i++) {
					data[row][i] = Double.parseDouble(vectorValues[i]);
				}
				row++;
			}
		} finally {
			reader.close();
		}

		return data;
	}

}
