package test.kmeans;

import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Random;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;

import test.wordcount.TextRecord;
import cache.io.CacheInputFormat;

/**
 * Driver for a single KMeans clustering MapReduce iteration.
 *
 * Input rows (comma-separated vectors) come either from a MySQL table
 * (-t) or from a file (-f), optionally wrapped in a caching input format
 * (-s). The first k rows/records seed the initial cluster centers, which
 * are written to the SequenceFile given by -c. Expects k as the first
 * non-option command-line argument.
 */
public class KMeansClusteringJob {
	
	private static final Log LOG = LogFactory.getLog(KMeansClusteringJob.class);
	
	public static final String OPT_INPUT_TABLE     = "t"; // Use a database
	public static final String OPT_INPUT_DATA_FILE = "f"; 
	public static final String OPT_INPUT_CENTERS_FILE = "c";
	
	public static final String OPT_OUTPUT         = "o";
	public static final String OPT_NUM_OF_MAPPERS = "m";
	public static final String OPT_USE_CACHE      = "s";
	
	// Command-line schema, built once by setupCmdLineOptions().
	static Options options;
	// Parsed command line, populated in main().
	static CommandLine line = null;
	
	
	/**
	 * Builds the command-line option schema: output file, mapper count,
	 * optional cache flag, centers file, and exactly one input source
	 * (database table or data file).
	 */
	@SuppressWarnings("static-access")
	private static void setupCmdLineOptions() {
		
		options = new Options();
		
		/*
		 * General options not related to input. 
		 */
		Option outputFilename = OptionBuilder.withArgName("filename")
			.withDescription("output file")
			.hasArg()
			.isRequired()
			.create(OPT_OUTPUT);

		Option numOfMappers = OptionBuilder.withArgName("number")
			.withDescription("number of mappers")
			.hasArg()
			.isRequired()
			.create(OPT_NUM_OF_MAPPERS);
		
		Option useCache = OptionBuilder
			.withDescription("use cache")
			.create(OPT_USE_CACHE);
		
		/*
		 * FIX: OPT_INPUT_CENTERS_FILE was declared but never registered, so
		 * line.getOptionValue(OPT_INPUT_CENTERS_FILE) in main() always
		 * returned null and new Path(null) failed. Register it as required,
		 * since main() dereferences it unconditionally.
		 */
		Option centersFilename = OptionBuilder.withArgName("filename")
			.withDescription("initial centers file")
			.hasArg()
			.isRequired()
			.create(OPT_INPUT_CENTERS_FILE);
		
		/*
		 * Input options: exactly one of table / file must be given.
		 */
		Option tableName = OptionBuilder
			.withArgName("table")
			.withDescription("input table name")
			.hasArg()
			.create(OPT_INPUT_TABLE);
		
		Option inputFileName = OptionBuilder
			.withArgName("filename")
			.withDescription("input file name")
			.hasArg()
			.create(OPT_INPUT_DATA_FILE);
		
		OptionGroup inputOptions = new OptionGroup();
		inputOptions.addOption(tableName);
		inputOptions.addOption(inputFileName);
		inputOptions.setRequired(true);
		

		options.addOption(outputFilename);
		options.addOption(numOfMappers);
		options.addOption(useCache);
		options.addOption(centersFilename);

		options.addOptionGroup(inputOptions);
	}
	
	/** @return true when the job reads its input from a database table. */
	private static boolean isDbInput() {
		return line.hasOption(OPT_INPUT_TABLE);
	}
	
	/**
	 * Configures the job's input source: either a MySQL table or a text
	 * file, each optionally wrapped in CacheInputFormat when -s is given.
	 *
	 * @param job the job whose input format and path/query are configured
	 * @throws IOException if the input path cannot be added
	 */
	private static void setInputMethod(Job job) throws IOException {
		
		if (isDbInput()) {
			/*
			 * Using a database.
			 * SECURITY NOTE(review): connection string and credentials are
			 * hard-coded in source; they should come from configuration.
			 */
			String connString = "jdbc:mysql://mysqlsrv1/dbs131_user25";
			DBConfiguration.configureDB (
					job.getConfiguration(), 
					"com.mysql.jdbc.Driver",
	                connString, "dbs131_user25", "dbsdbs"
	        );

			// Fields fetched from the DB; TextRecord expects one "line" column.
			String[] fields = { "line" };
			
	        String queryString = line.getOptionValue(OPT_INPUT_TABLE);
			DBInputFormat.setInput(
	        		job, 
	        		TextRecord.class, 
	        		queryString, /* table name */
	                null, /* conditions */ 
	                null, /* order-by */
	                fields);
			
			if (line.hasOption(OPT_USE_CACHE)) {
				job.setInputFormatClass(CacheInputFormat.class);
				// Cache key combines connection string and table name so
				// different sources do not collide in the cache.
				CacheInputFormat.setDelegateInputFormatData(job, DBInputFormat.class, connString + "|" + queryString);
			} else {
				job.setInputFormatClass(DBInputFormat.class);
			}			
			
		} else {
			/*
			 * Using a file.
			 */
			String inputFilename = line.getOptionValue(OPT_INPUT_DATA_FILE);
			FileInputFormat.addInputPath(job, new Path(inputFilename));
			
			if (line.hasOption(OPT_USE_CACHE)) {
				job.setInputFormatClass(CacheInputFormat.class);
				CacheInputFormat.setDelegateInputFormatData(job, TextInputFormat.class, inputFilename);
			} else {
				job.setInputFormatClass(TextInputFormat.class);
			}			
		}
	}
	
	/**
	 * Entry point: parses the command line, seeds k initial centers, and
	 * runs one KMeans MapReduce iteration.
	 *
	 * @param args CLI options (see setupCmdLineOptions) plus k as the first
	 *             non-option argument
	 */
	public static void main(String[] args) throws Exception,
			InterruptedException, ClassNotFoundException {
		
		setupCmdLineOptions();
		
		/*
		 * FIX: parse the command line BEFORE building the Configuration and
		 * Job, so option values can be placed into the configuration that
		 * the Job actually copies (see below).
		 */
	    try {
	    	CommandLineParser parser = new GnuParser();
	        line = parser.parse( options, args );
	    }
	    catch( ParseException exp ) {
		    HelpFormatter formatter = new HelpFormatter();
		    // FIX: help banner said "WordCount" (copy/paste from another job).
		    formatter.printHelp( "KMeansClusteringJob", options );
		    System.exit(-1);
	    }
	    
		int iteration = 1;
		Configuration conf = new Configuration();
		/*
		 * FIX: "num.iteration" and "centroid.path" were previously set on
		 * `conf` AFTER `new Job(conf)`. Job copies the Configuration, so the
		 * tasks never saw those values. Set them before creating the Job.
		 */
		conf.set("num.iteration", iteration + "");
		Path centers = new Path(line.getOptionValue(OPT_INPUT_CENTERS_FILE));
		conf.set("centroid.path", centers.toString());
		
		Job job = new Job(conf);
		
		int numOfMappers = 2;
		try {
			numOfMappers = Integer.parseInt(line.getOptionValue(OPT_NUM_OF_MAPPERS));
		} catch (NumberFormatException e) {
			System.err.println("Number of mappers is not ok.");
			System.exit(-1);
		} 
		job.getConfiguration().setInt("mapred.map.tasks", numOfMappers);
	    
		setInputMethod(job);
		
		Path out = new Path(line.getOptionValue(OPT_OUTPUT) + "/clustering/depth_1");

		job.setJobName("KMeans Clustering " + ArrayUtils.toString(args));

		job.setMapperClass(KMeansMapper.class);
		job.setReducerClass(KMeansReducer.class);
		job.setJarByClass(KMeansMapper.class);
		
		// Start from a clean output directory and centers file.
		FileSystem fs = FileSystem.get(conf);
		if (fs.exists(out))
			fs.delete(out, true);

		if (fs.exists(centers))
			fs.delete(centers, true);

		/*
		 * FIX: k was read from args[0], which is an option token (e.g. "-o")
		 * whenever options precede it, so parseInt threw. Read it from the
		 * leftover (non-option) arguments instead; the old "k first"
		 * invocation still works because getArgs() preserves leftovers.
		 */
		String[] leftoverArgs = line.getArgs();
		if (leftoverArgs.length == 0) {
			System.err.println("Missing number of clusters (k).");
			System.exit(-1);
		}
		int k = Integer.parseInt(leftoverArgs[0]);
		
		if(isDbInput()) {
			initCentersDb(job, centers, fs, k);
		} else {
			initCenterFile(job, centers, fs, k);
		}

		SequenceFileOutputFormat.setOutputPath(job, out);
		/*
		 * FIX: the original unconditionally called
		 * job.setInputFormatClass(SequenceFileInputFormat.class) here, which
		 * silently overwrote the input format chosen by setInputMethod() and
		 * made the -t/-f/-s options meaningless (leftover from an example
		 * whose input was a SequenceFile).
		 */
		job.setOutputFormatClass(SequenceFileOutputFormat.class);

		job.setOutputKeyClass(ClusterCenter.class);
		job.setOutputValueClass(Vector.class);

		job.waitForCompletion(true);

		long counter = job.getCounters().findCounter(
				KMeansReducer.Counter.CONVERGED).getValue();
		// Only one iteration is run here; the multi-iteration convergence
		// loop from the original example was dead (commented-out) code.
		LOG.info("Iteration " + iteration + " done, CONVERGED counter = " + counter);
	}

	/**
	 * Seeds the centers SequenceFile with the first k rows of the configured
	 * input query. Each row's first column is expected to hold one
	 * comma-separated vector.
	 *
	 * @param job    job carrying the DB configuration
	 * @param center path of the centers SequenceFile to create
	 * @param fs     filesystem to write to
	 * @param k      number of centers to seed
	 * @throws Exception if the connection or the writer cannot be created
	 */
	private static void initCentersDb(Job job, Path center, FileSystem fs, int k) throws Exception {
		
		Configuration conf = job.getConfiguration();
		DBConfiguration dbConf = new DBConfiguration(conf);
		Connection conn = dbConf.getConnection();

		final SequenceFile.Writer centerWriter = SequenceFile.createWriter(fs,
				conf, center, ClusterCenter.class, IntWritable.class);
		final IntWritable writeValue = new IntWritable(0);

		Vector readValue = new Vector();
	
		Statement stmt = null;
		ResultSet rs   = null;
		
		try {
			stmt = conn.createStatement();
			rs = stmt.executeQuery(dbConf.getInputQuery());
			int i = 0;
			// FIX: rs.next()'s result was ignored; stop when rows run out
			// instead of reading past the end of the result set.
			while (i < k && rs.next()) {
				// FIX: JDBC columns are 1-based; getString(0) always threw.
				String row = rs.getString(1);
				String[] arr = row.split(",");
				double[] nums = new double[arr.length];
				for (int j = 0; j < nums.length; j++) {
					// FIX: was nums[i] — only one slot was ever written.
					nums[j] = Double.parseDouble(arr[j]);
				}
				readValue.setVector(nums);
				ClusterCenter writeKey = new ClusterCenter(readValue);
				centerWriter.append(writeKey, writeValue);
				LOG.info("CENTER " + i + ": " + writeKey.toString());
				i++;
			}
			
		} catch (SQLException e) {
			// Best-effort seeding preserved from the original; the job will
			// fail later if no centers were written.
			e.printStackTrace();
		} finally {
			// FIX: rs/stmt/conn were leaked, and the writer was never closed
			// when an error occurred.
			if (rs != null) {
				try { rs.close(); } catch (SQLException ignored) { /* closing */ }
			}
			if (stmt != null) {
				try { stmt.close(); } catch (SQLException ignored) { /* closing */ }
			}
			try { conn.close(); } catch (SQLException ignored) { /* closing */ }
			centerWriter.close();
		}
	}

	/**
	 * Writes a tiny hard-coded sample dataset and two initial centers.
	 * Currently unused (referenced only from commented-out bootstrap code in
	 * the original example); kept for manual testing.
	 *
	 * @param conf   job configuration
	 * @param in     path for the sample-data SequenceFile
	 * @param center path for the centers SequenceFile
	 * @param fs     filesystem to write to
	 * @throws IOException if either SequenceFile cannot be written
	 */
	private static void writeData(Configuration conf, Path in, Path center,
			FileSystem fs) throws IOException {

		final SequenceFile.Writer centerWriter = SequenceFile.createWriter(fs,
				conf, center, ClusterCenter.class, IntWritable.class);

		final IntWritable value = new IntWritable(0);
		centerWriter.append(new ClusterCenter(new Vector(1, 1)), value);
		centerWriter.append(new ClusterCenter(new Vector(5, 5)), value);
		centerWriter.close();

		final SequenceFile.Writer dataWriter = SequenceFile.createWriter(fs,
				conf, in, ClusterCenter.class, Vector.class);

		// The key is a dummy (0,0) center; only the vectors matter here.
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(1, 2));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(16, 3));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(3, 3));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(2, 2));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(2, 3));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(25, 1));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(7, 6));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(6, 5));
		dataWriter.append(new ClusterCenter(new Vector(0, 0)), new Vector(-1, -23));
		dataWriter.close();
	}

	/**
	 * Picks k pseudo-random records from the job's first input path (read as
	 * a SequenceFile) and writes them to the centers file. The Random has a
	 * fixed seed so the selection is reproducible across runs.
	 *
	 * NOTE(review): reader.next()'s return value is ignored; if the input has
	 * fewer records than the random skips require, the previously-read value
	 * is reused silently — confirm this is intended.
	 * NOTE(review): the input is opened as a SequenceFile even though
	 * setInputMethod() may have configured TextInputFormat for the same path
	 * — verify the on-disk format.
	 *
	 * @param job    job whose input paths are consulted
	 * @param center path of the centers SequenceFile to create
	 * @param fs     filesystem to read/write
	 * @param k      number of centers to seed
	 * @throws IOException if reading or writing fails
	 */
	private static void initCenterFile(Job job, Path center, FileSystem fs, int k) throws IOException {

		Configuration conf = job.getConfiguration();
		Path in = FileInputFormat.getInputPaths(job)[0];
		final SequenceFile.Reader reader = new SequenceFile.Reader(fs, in, conf);
		final SequenceFile.Writer centerWriter = SequenceFile.createWriter(fs,
				conf, center, ClusterCenter.class, IntWritable.class);
		// Fixed seed for reproducible center selection.
		Random generator = new Random(123456789);
		final IntWritable writeValue = new IntWritable(0);

		ClusterCenter readKey = new ClusterCenter();
		Vector readValue = new Vector();
		try {
			for (int i = 0; i < k; i++) {
				// Skip a random number (< 1000) of records, then take the
				// current one as the i-th center.
				for (int j = 0, rnd = generator.nextInt(1000); j < rnd; j++) {
					reader.next(readKey, readValue);
				}
				ClusterCenter writeKey = new ClusterCenter(readValue);
				centerWriter.append(writeKey, writeValue);
				LOG.info("CENTER " + i + ": " + writeKey.toString());
			}
		} finally {
			// FIX: reader and writer were leaked when an append failed.
			reader.close();
			centerWriter.close();
		}
	}

	/**
	 * Logs every center stored under the "centroid.path" configuration key.
	 * Currently unused (it was called only from the commented-out iteration
	 * loop of the original example); kept as a debugging aid.
	 *
	 * @param conf configuration carrying "centroid.path"
	 */
	private static void printCenters(Configuration conf) {
		Path centroids = new Path(conf.get("centroid.path"));
		FileSystem fs;
		try {
			fs = FileSystem.get(conf);

			SequenceFile.Reader reader = new SequenceFile.Reader(fs, centroids,
					conf);
			try {
				ClusterCenter key = new ClusterCenter();
				IntWritable value = new IntWritable();
				int i = 0;
				while (reader.next(key, value)) {
					LOG.info("CENTER " + i + ": " + key.toString());
					i++;
				}
			} finally {
				reader.close();
			}
		} catch (IOException e) {
			// FIX: was e.printStackTrace(); use the class logger instead.
			LOG.error("Failed to read centers from " + centroids, e);
		}

	}

	// NOTE(review): unused instance field (every member of this class is
	// static); kept only in case same-package code references it — candidate
	// for removal.
	Random generator = new Random(System.currentTimeMillis());
}
