package examples.mr;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Map.Entry;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

/**
 * Two-pass MapReduce "grep" for a fixed phrase that may be split across
 * input-split boundaries: pass one records partial matches per split, pass
 * two re-scans the input with those partial matches available via the
 * distributed cache.
 */
public class ScanGrep {

	// Target phrase to search for. Partial matches are tested with
	// String.contains (see plus()), so only contiguous substrings of this
	// phrase survive the scan.
	public static final String regx = "static public class MyFirstMapper extends";
	    
	// Delimiter between partial-sum records in the pass-one output.
	// NOTE(review): "|" is a regex metacharacter — any String.split(SPL)
	// call must quote it (e.g. Pattern.quote(SPL)) to split on the literal.
	public static final String SPL = "|";

	/**
	 * First-pass mapper: tokenizes each line and feeds the tokens through
	 * {@link ScanGrep#plus(String, String)} to maintain a running partial
	 * match against {@link ScanGrep#regx}. Whatever partial match survives at
	 * the end of the split is flushed in {@link #cleanup} as a single
	 * "offset=match" record keyed by the byte offset of the first non-empty
	 * line, so the second pass can stitch matches across split boundaries.
	 */
	static public class MyFirstMapper  
	                   extends
			Mapper<LongWritable, Text, LongWritable, Text> {

		// Byte offset of the first non-empty line seen by this mapper;
		// -1 until the first line is processed. Used to key the record
		// emitted in cleanup().
		private long chunkkey = -1;
		// Running partial match (null when nothing currently matches).
		private String sum = null;

		@Override
		protected void cleanup(Context context) throws IOException,
				InterruptedException {
			// Flush the surviving partial match (if any) as "offset=match".
			if (sum != null && sum.length() > 0)
				context.write(new LongWritable(chunkkey), new Text(chunkkey
						+ "=" + sum));
		}

		@Override
		public void map(LongWritable key, Text line, Context context)
				throws IOException, InterruptedException {
			String aline = line.toString().trim();
			if (aline.isEmpty())
				return;

			// Remember where this split's data starts so cleanup() can key
			// the partial sum by that offset.
			if (chunkkey < 0)
				chunkkey = key.get();
			StringTokenizer itr = new StringTokenizer(aline);
			while (itr.hasMoreTokens()) {
				sum = plus(sum, itr.nextToken().trim());
			}

		}

	}

	/**
	 * Single reducer for the first pass: concatenates every partial-match
	 * record into one {@link ScanGrep#SPL}-delimited line (with a trailing
	 * delimiter), so the driver can ship the whole list to pass two via the
	 * distributed cache.
	 */
	public static class GrepReducer extends
			Reducer<LongWritable, Text, Text, NullWritable> {
		private static final Log LOG = LogFactory.getLog(GrepReducer.class);

		@Override
		public void reduce(LongWritable key, Iterable<Text> values,
				Context context) throws IOException, InterruptedException {
			// StringBuilder: no cross-thread use, so StringBuffer's locking
			// is unnecessary.
			StringBuilder buf = new StringBuilder();
			LOG.info("constructing list");
			// FIX: the original called values.iterator() on every loop test,
			// which only terminated because Hadoop happens to return the same
			// iterator instance. Iterate once with for-each instead.
			for (Text value : values) {
				buf.append(value.toString()).append(SPL);
			}

			context.write(new Text(buf.toString()), NullWritable.get());

		}
	}

	/**
	 * Second-pass (map-only) mapper: re-scans the input with pass one's
	 * partial sums available (loaded from the distributed cache in
	 * {@link #setup}), so matches of {@link ScanGrep#regx} that span the
	 * first pass's split boundaries can be completed and reported.
	 */
	public static class GrepSecondMapper extends
			Mapper<LongWritable, Text, Text, NullWritable> {

		private static final Log LOG = LogFactory
				.getLog(GrepSecondMapper.class);

		// Partial matches from pass one, keyed by the byte offset of the
		// split they came from; null when the cache file was empty/unreadable.
		private TreeMap<Long, String> psums;
		// Byte offset of the first line this mapper processed (-1 until seen).
		private long startLineKey = -1;

		// Running match being assembled across the lines of this split.
		private String totalsum = "";

		@Override
		public void map(LongWritable key, Text list, Context context)
				throws IOException, InterruptedException {
			// On the first line of this split, seed the running match with
			// the partial sum recorded by pass one for the preceding split.
			if (startLineKey < 0) {
				startLineKey = key.get();
				if (psums != null) {
					Entry<Long, String> de = psums.lowerEntry(startLineKey);

					if (de != null)
						totalsum += de.getValue();
				}

			}

			StringBuilder newList = new StringBuilder(1024);

			StringTokenizer itr = new StringTokenizer(list.toString());
			while (itr.hasMoreTokens()) {
				totalsum = plus(totalsum, itr.nextToken());
				// Complete match of the target phrase: record it and reset.
				if (ScanGrep.regx.equals(totalsum)){
					newList.append(key.get() + " : " + totalsum + " ");
					totalsum = null;
				}
			}

			if (newList.length() > 0) {
				LOG.info("output a line: " + newList.toString());
				context.write(new Text(newList.toString()), NullWritable.get());
			}

		}


		@Override
		protected void setup(Context context) throws IOException,
				InterruptedException {

			// Localized copy of pass one's output, shipped by the driver
			// through the distributed cache.
			Path[] paths = DistributedCache.getLocalCacheFiles(context
					.getConfiguration());  
			long start = System.currentTimeMillis();
			StringBuilder listBuf = new StringBuilder();
			BufferedReader reader = new BufferedReader(new FileReader(
					paths[0].toString()));  
			try {

				String line = null;
				while ((line = reader.readLine()) != null) {
					listBuf.append(line);
				}
 
			} catch (IOException x) {
				// Best-effort: an unreadable cache file just means no partial
				// sums are applied (psums stays null).
				System.err.format("IOException: %s%n", x);
			} finally {
				// FIX: the reader was never closed in the original (leak).
				reader.close();
			}
			// FIX: the original called partialSumList.trim() and discarded
			// the result (strings are immutable); assign the trimmed value.
			String partialSumList = listBuf.toString().trim();

			LOG.debug("finish reading partialSumList. Time ="
					+ (System.currentTimeMillis() - start));

			start = System.currentTimeMillis();
			if (partialSumList.length() > 0) {
				// FIX: SPL ("|") is a regex metacharacter — split(SPL) would
				// split between every character and later crash in
				// Long.parseLong. Quote it to split on the literal delimiter.
				String[] tmp = partialSumList.split(Pattern.quote(SPL));
				psums = new TreeMap<Long, String>();

				for (String val : tmp) {
					if (val != null && val.length() > 0) {
						String[] kv = val.split("="); // "offset=partialMatch"
						psums.put(Long.parseLong(kv[0]), kv[1]);
					}
				}
			}
			LOG.debug("finish parsing partialSumList. Time ="
					+ (System.currentTimeMillis() - start));

		}
	}

	/**
	 * Extends the running partial match {@code x} with the next token
	 * {@code y}, validated against {@link #regx}.
	 *
	 * @param x current partial match; {@code null} or empty means "no match
	 *          in progress"
	 * @param y next whitespace-delimited token from the input
	 * @return {@code y} when it starts a fresh match, {@code x + " " + y}
	 *         when the extended string is still a substring of {@code regx},
	 *         otherwise {@code null} (the sequence cannot be part of the
	 *         pattern)
	 */
	public static String plus(String x, String y) {
		// No match in progress: y alone either starts one or it doesn't.
		if (x == null || x.isEmpty()) {
			return regx.contains(y) ? y : null;
		}

		// Extend only when both the old match and the extension are still
		// substrings of the pattern.
		String extended = x + " " + y;
		boolean stillMatches = regx.contains(x) && regx.contains(extended);
		return stillMatches ? extended : null;
	}

	/**
	 * Driver for the two-pass grep.
	 *
	 * Pass 1 (MyFirstMapper + single GrepReducer) writes the per-split
	 * partial-match list to {@code <output>_int}; the driver then copies that
	 * list into the distributed cache and runs pass 2 (GrepSecondMapper,
	 * map-only) over the same input.
	 *
	 * @param args args[0] = input path, args[1] = output path
	 * @throws Exception if job configuration or execution fails
	 */
	public static void main(String[] args) throws Exception {

		boolean ok = false; // FIX: primitive boolean instead of boxed Boolean
		Path outTmp = null;
		Path output = null;
		Path input = null;
		if (args.length >= 2) {
			input = new Path(args[0]);
			output = new Path(args[1]);

			Configuration conf = new Configuration();
			FileSystem fs = FileSystem.get(conf);
			if (fs.exists(output)) {
				fs.delete(output, true);
			}

			Job job = new Job(conf, "Grep scan1");
			job.setJarByClass(ScanGrep.class);
			job.setMapperClass(MyFirstMapper.class);
			job.setReducerClass(GrepReducer.class);
			// A single reducer so the partial-sum list lands in one file
			// (part-r-00000), which the second stage picks up below.
			job.setNumReduceTasks(1);

			job.setMapOutputKeyClass(LongWritable.class);
			job.setMapOutputValueClass(Text.class);
			job.setOutputKeyClass(Text.class);
			job.setOutputValueClass(NullWritable.class);
			job.setOutputFormatClass(TextOutputFormat.class);
			FileInputFormat.addInputPath(job, input);
			outTmp = new Path(args[1] + "_int");
			FileOutputFormat.setOutputPath(job, outTmp);
			if (fs.exists(outTmp)) {
				fs.delete(outTmp, true);
			}
			ok = job.waitForCompletion(true);
		} else {
			// FIX: tell the user why we are exiting instead of failing
			// silently with status 1.
			System.err.println("Usage: ScanGrep <input> <output>");
		}

		if (ok) {

			Configuration conf2 = new Configuration();
			FileSystem fs = FileSystem.get(conf2);
			Path partialSumList = new Path(outTmp, "part-r-00000");

			// Round-trip pass one's output through the local FS into a
			// well-known cache path, then register it with the distributed
			// cache so every pass-two task can read it in setup().
			Path localCache = new Path("/tmp/scan_mr/partialSumList.txt");
			fs.copyToLocalFile(partialSumList, localCache);

			Path cacheFile = new Path("scan/cache/partialSumList.txt");
			if (fs.exists(cacheFile))
				fs.delete(cacheFile, true);
			fs.copyFromLocalFile(localCache, cacheFile);
			DistributedCache.addCacheFile(cacheFile.toUri(), conf2);
			System.out.println(cacheFile.toUri());

			Job job2 = new Job(conf2, "Grep scan2");
			job2.setJarByClass(GrepSecondMapper.class);
			job2.setMapperClass(GrepSecondMapper.class);
			job2.setNumReduceTasks(0); // map-only second pass
			job2.setMapOutputKeyClass(Text.class);
			job2.setMapOutputValueClass(NullWritable.class);
			job2.setInputFormatClass(TextInputFormat.class);
			job2.setOutputFormatClass(TextOutputFormat.class);
			FileInputFormat.addInputPath(job2, new Path(args[0]));
			FileOutputFormat.setOutputPath(job2, output);
			int rst = job2.waitForCompletion(true) ? 0 : 1;
			fs.deleteOnExit(outTmp);
			fs.deleteOnExit(output); // output are too large, so I delete them
			System.out.println("JOB finished but output are too large, so I delete them.");
			System.exit(rst);

		} else
			System.exit(1);

	}

}
