package cshl.edu;

import java.io.IOException;
import java.util.*;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

public class WordCount {

	public static class Map extends MapReduceBase implements
			Mapper<LongWritable, Text, Text, Text> {
		// Reusable output buffers. Instance fields (not static): Hadoop calls
		// map() single-threaded per task, but sharing mutable Text objects
		// across mapper instances via static fields is an avoidable hazard.
		private final Text merKey = new Text();
		private final Text merVal = new Text();

		/**
		 * Parses records of the form {@code SEQ(LABEL,RECORD_INDEX)} and emits
		 * one output pair per 21-mer of {@code SEQ}.
		 *
		 * <p>Key: {@code LABEL} concatenated with the 21-character window.
		 * Value: the 1-based position of the window start, computed as
		 * {@code RECORD_INDEX * 60 + 1 + windowOffset} — presumably each input
		 * record covers 60 bases of the source sequence (TODO confirm against
		 * the upstream splitter).
		 *
		 * @param key      byte offset of the line in the input split (unused)
		 * @param val      one input line
		 * @param output   collector receiving (label+21mer, position) pairs
		 * @param reporter unused
		 * @throws IOException if the collector fails
		 */
		public void map(LongWritable key, Text val,
				OutputCollector<Text, Text> output, Reporter reporter)
				throws IOException {

			String line = val.toString();
			int pos1 = line.indexOf('(');
			int pos2 = line.indexOf(',');
			int pos3 = line.indexOf(')');

			// Require a well-ordered "(label,index)" suffix. Checking
			// pos2 > pos1 and pos3 > pos2 (not just > 0) prevents
			// StringIndexOutOfBoundsException on lines where the delimiters
			// appear out of order. Malformed lines are silently skipped,
			// matching the original behavior.
			if (pos1 > 0 && pos2 > pos1 && pos3 > pos2) {
				String label = line.substring(pos1 + 1, pos2);
				// Convert the record index to a 1-based base position.
				int index = Integer.parseInt(line.substring(pos2 + 1, pos3)) * 60 + 1;
				String seq = line.substring(0, pos1);
				int len = seq.length();
				// Bug fix: the last valid window starts at len - 21, so the
				// bound must be inclusive (ix + 21 <= len). The original
				// `ix < len - 21` silently dropped the final 21-mer.
				for (int ix = 0; ix + 21 <= len; ix++) {
					merKey.set(label + seq.substring(ix, ix + 21));
					merVal.set(Integer.toString(index));
					output.collect(merKey, merVal);
					index++;
				}
			}
		}
	}

	public static class Reduce extends MapReduceBase implements
			Reducer<Text, Text, Text, Text> {
		/**
		 * Concatenates every position value seen for a k-mer key into a single
		 * comma-separated list and emits it once.
		 *
		 * <p>This class is registered as both combiner and reducer (see
		 * {@code main}); that is safe only because comma-joining is
		 * associative — joining partially joined lists yields the same final
		 * list as joining the raw values.
		 *
		 * @param key      label + 21-mer produced by the mapper
		 * @param values   all position strings collected for this key
		 * @param output   collector receiving (key, "p1,p2,...") pairs
		 * @param reporter unused
		 * @throws IOException if the collector fails
		 */
		public void reduce(Text key, Iterator<Text> values,
				OutputCollector<Text, Text> output, Reporter reporter)
				throws IOException {
			StringBuilder joined = new StringBuilder();
			while (values.hasNext()) {
				if (joined.length() > 0) {
					joined.append(',');
				}
				joined.append(values.next().toString());
			}
			output.collect(key, new Text(joined.toString()));
		}
	}

	/**
	 * Configures and submits the k-mer indexing job.
	 *
	 * @param args {@code args[0]} = input path, {@code args[1]} = output path
	 * @throws Exception if job submission or execution fails
	 */
	public static void main(String[] args) throws Exception {
		// Fail fast with a usage message instead of an
		// ArrayIndexOutOfBoundsException on missing arguments.
		if (args.length < 2) {
			System.err.println("Usage: WordCount <input path> <output path>");
			System.exit(2);
		}

		JobConf conf = new JobConf(WordCount.class);
		conf.setJobName("LineIndexer");

		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(Text.class);

		conf.setMapperClass(Map.class);
		// Reduce doubles as the combiner: comma-joining value lists is
		// associative, so local pre-combining is correct and cuts shuffle
		// volume for high-frequency k-mers.
		conf.setCombinerClass(Reduce.class);
		conf.setReducerClass(Reduce.class);

		// These counts are hints; the framework may choose different numbers
		// of tasks depending on input splits and cluster capacity.
		conf.setNumMapTasks(200);
		conf.setNumReduceTasks(200);

		conf.setCompressMapOutput(true);

		FileInputFormat.setInputPaths(conf, new Path(args[0]));
		FileOutputFormat.setOutputPath(conf, new Path(args[1]));

		JobClient.runJob(conf);
	}
}