package ipl;
/*
 * YU LIU : 2012-11-30 @ Kochi
 */
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.chain.ChainMapper;
import org.apache.hadoop.mapreduce.lib.chain.ChainReducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class TestScan {

	public static class FirstMapper extends
			Mapper<LongWritable, Text, IntWritable, IntPair> {

		private final static IntWritable one = new IntWritable(1);

		// private Text list;

		protected void setup(Context context) throws IOException,
				InterruptedException {
			// get
		}

		public void map(LongWritable key, Text list, Context context)
				throws IOException, InterruptedException {
			StringTokenizer itr = new StringTokenizer(list.toString());
			String[] eles = list.toString().trim().split(" ");
			int sum = 0;
			while (itr.hasMoreTokens()) {
				sum += Integer.parseInt(itr.nextToken());
			}
//			for (int i = 0; i < eles.length; i++) {
//				sum += Integer.parseInt(eles[i]);
//			}
			context.write(one, new IntPair((int) key.get(), sum));

		}
	}


	public static class ScanReducer extends
			Reducer<IntWritable, IntPair, IntWritable, Text> {
		String interSum = "";

		public void reduce(IntWritable key, Iterable<IntPair> values,
				Context context) throws IOException, InterruptedException {
			ArrayList<IntPair> sorted = new ArrayList<IntPair>();

			for (IntPair val : values) {
				IntPair tmp = new IntPair(val.first, val.second);
				sorted.add(tmp);
			}
			util.sortList(sorted); //sorting, for dis-parallel cases
			interSum = util.list2String(sorted);
			// sorted.
			context.write(key, new Text(util.list2String(sorted)));

		}

		protected void cleanup(Context context) throws IOException,
				InterruptedException {
			context.getConfiguration().set("Scan_test", interSum);
		}
	}

	public static class SecondMapper extends
			Mapper<LongWritable, Text, Text, NullWritable> {

		private ArrayList<Integer> scanList;
		private Hashtable<Integer, Integer> keyIndex;

		protected void setup(Context context) throws IOException,
				InterruptedException {
			keyIndex = new Hashtable<Integer, Integer>();
			String[] tmp = context.getConfiguration().get("Scan_test").trim()
					.split(";");
			scanList = new ArrayList<Integer>();

			scanList.add(0);
			int sum = 0;
			int counter = 0;
			for (String val : tmp) {
				if (val != null && val.length() > 0) {
					int key = Integer.parseInt(val.split("=")[0]);
					int parSum = Integer.parseInt(val.split("=")[1]);
					keyIndex.put(key, counter);
					sum += parSum;
					scanList.add(sum); // add all of them to this list, from 0
										// to last partial-sum
				}
				counter++;
			}

		}

		public void map(LongWritable key, Text list, Context context)
				throws IOException, InterruptedException {
			String[] eles = list.toString().trim().split(" ");
			int idx = (int) key.get();
			String newList = "";
			// prescan
			int sum = scanList.get(keyIndex.get(idx));
			for (int i = 0; i < eles.length; i++) {
				newList += sum+ " ";
				sum += Integer.parseInt(eles[i]);
			}
			context.write(new Text(newList), NullWritable.get());
		}
	}

	/**
	 * Abandoned attempt to express the two-pass scan as one chained job.
	 *
	 * NOTE(review): this method is never called and the job it configures is
	 * never submitted. The key/value classes passed to the chain do not match
	 * the component generics (FirstMapper emits IntWritable/IntPair, not
	 * Text/Text; SecondMapper consumes LongWritable/Text, not Text/Text), so
	 * it would fail at runtime if used. main() runs the two jobs separately;
	 * kept verbatim as a record of the attempted approach.
	 */
	public static void chinedMR() throws Exception {
		Configuration conf = new Configuration();
		Job job = new Job(conf, "scan1");

		Configuration mapAConf = new Configuration(false);

		// Declared output types (Text/Text) disagree with FirstMapper's
		// actual IntWritable/IntPair generics — see note above.
		ChainMapper.addMapper(job, FirstMapper.class, LongWritable.class,
				Text.class, Text.class, Text.class, mapAConf);

		// mapBConf is created but never used.
		Configuration mapBConf = new Configuration(false);

		Configuration reduceConf = new Configuration(false);

		ChainReducer.setReducer(job, ScanReducer.class, LongWritable.class,
				Text.class, Text.class, Text.class, reduceConf);

		ChainReducer.addMapper(job, SecondMapper.class, Text.class, Text.class,
				LongWritable.class, Text.class, null);

	}
	/**
	 * Driver for the two-pass parallel prefix scan.
	 *
	 * Pass 1 ("scan1") sums each input line and writes the sorted
	 * "offset=sum;..." list to {@code args[1] + "_int"}. The driver reads that
	 * single output line, passes it to pass 2 ("scan2") via the "Scan_test"
	 * configuration key, and pass 2 writes each line's exclusive prefix scan
	 * to {@code args[1] + "_final"}.
	 *
	 * @param args args[0] = input path; args[1] = output path prefix
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		Job job = new Job(conf, "scan1");
		job.setJarByClass(TestScan.class);
		job.setMapperClass(FirstMapper.class);
		job.setReducerClass(ScanReducer.class);

		job.setMapOutputKeyClass(IntWritable.class);
		job.setMapOutputValueClass(IntPair.class);
		job.setOutputKeyClass(IntWritable.class);
		job.setOutputValueClass(Text.class);
		job.setOutputFormatClass(TextOutputFormat.class);
		FileInputFormat.addInputPath(job, new Path(args[0]));
		Path out = new Path(args[1] + "_int");
		FileOutputFormat.setOutputPath(job, out);
		// primitive boolean — the boxed Boolean the original used buys nothing
		boolean ok = job.waitForCompletion(true);
		if (!ok) {
			System.exit(1);
		}
		// Read the single reducer's one output line. try-with-resources closes
		// the stream (the original leaked it) and BufferedReader replaces the
		// deprecated FSDataInputStream.readLine().
		String scanSum;
		try (BufferedReader reader = new BufferedReader(new InputStreamReader(
				FileSystem.get(conf).open(new Path(out, "part-r-00000")),
				StandardCharsets.UTF_8))) {
			String line = reader.readLine();
			if (line == null) {
				System.err.println("Empty scan output in " + out);
				System.exit(1);
				return; // unreachable; satisfies definite-assignment analysis
			}
			// TextOutputFormat writes "key<TAB>value"; keep only the value.
			scanSum = line.trim().split("\t")[1];
		}
		Configuration conf2 = new Configuration();
		conf2.set("Scan_test", scanSum);
		Job job2 = new Job(conf2, "scan2");
		job2.setJarByClass(TestScan.class); // was missing: remote tasks need the jar
		job2.setMapperClass(SecondMapper.class);
		job2.setNumReduceTasks(0); // map-only second pass
		job2.setMapOutputKeyClass(Text.class);
		job2.setMapOutputValueClass(NullWritable.class);
		job2.setInputFormatClass(TextInputFormat.class);
		job2.setOutputFormatClass(TextOutputFormat.class);
		FileInputFormat.addInputPath(job2, new Path(args[0]));
		FileOutputFormat.setOutputPath(job2, new Path(args[1] + "_final"));
		System.exit(job2.waitForCompletion(true) ? 0 : 1);
	}

}
