package cgl.hadoopsensorgrid.examples.dummy;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.StringTokenizer;
import java.lang.Integer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.util.GenericOptionsParser;

/**
 * Dummy MapReduce job: the mapper reads newline-separated "task lists" in
 * which a line starting with {@code "mapper"} selects a destination output
 * path, and every other line names a local file whose contents are emitted as
 * (destination path, file bytes). The reducer appends each payload to the
 * local file named by the key; the side-effect write IS the job's output —
 * nothing is emitted through the MapReduce output collector.
 */
public class dummySingle {

	public static class dummyMapper extends
			Mapper<Object, Text, Text, BytesWritable> {
		// Most recently seen "mapper N" header line; selects which output
		// file subsequent payloads are routed to. Persists across map()
		// calls within one task.
		String fileSource = "";

		/**
		 * Splits the input value on newlines. Header lines ("mapper ...")
		 * update {@link #fileSource}; every other line is treated as a path
		 * to a local file that is read and emitted whole.
		 *
		 * @throws IOException if a listed file cannot be read
		 */
		public void map(Object key, Text value, Context context)
				throws IOException, InterruptedException {
			StringTokenizer itr = new StringTokenizer(value.toString(), "\n");

			while (itr.hasMoreTokens()) {
				String filename = itr.nextToken();

				// Header line: remember which logical mapper the following
				// file paths belong to, then move on.
				if (filename.startsWith("mapper")) {
					fileSource = filename;
					System.out.println("fileSource: " + fileSource);
					continue;
				}

				// Read the whole file. StringBuilder avoids the O(n^2) cost
				// of repeated String concatenation; the finally block
				// guarantees the reader is closed even if readLine() throws
				// (the original leaked the reader on error).
				StringBuilder filecontents = new StringBuilder();
				BufferedReader in = new BufferedReader(new FileReader(filename));
				try {
					String line;
					while ((line = in.readLine()) != null) {
						filecontents.append(line).append('\n');
					}
					// Extra separator newline; the reducer trims one trailing
					// character when writing.
					filecontents.append('\n');
				} finally {
					in.close();
				}

				// NOTE(review): getBytes() uses the platform default charset,
				// matching the reducer's decoding — kept as-is.
				byte[] resultPayload = filecontents.toString().getBytes();
				BytesWritable result = new BytesWritable();
				result.set(resultPayload, 0, resultPayload.length);

				// Route the payload to a fixed output path per header line.
				Text text = new Text();
				if (fileSource.equals("mapper 1")) {
					text.set("/globalhome/chaosun/hadooptest/dummy/outputfiles/file1.txt");
				} else if (fileSource.equals("mapper 2")) {
					text.set("/globalhome/chaosun/hadooptest/dummy/outputfiles/file2.txt");
				} else if (fileSource.equals("mapper 3")) {
					text.set("/globalhome/chaosun/hadooptest/dummy/outputfiles/file3.txt");
				} else {
					// NOTE(review): System.exit() kills the whole task JVM;
					// throwing IOException would let the framework report the
					// failure cleanly, but the original exit behavior is kept.
					System.err.println("Wrong fileSource: " + fileSource);
					System.exit(-1);
				}

				context.write(text, result);
			}
		}

	}

	public static class dummyReducer extends
			Reducer<Text, BytesWritable, Text, BytesWritable> {

		/**
		 * Appends each payload to the local file named by the key. Emits
		 * nothing to the MapReduce output collector; the local file write is
		 * the intended effect.
		 *
		 * @throws IOException if the destination file cannot be written
		 */
		public void reduce(Text key, Iterable<BytesWritable> values, Context context)
				throws IOException, InterruptedException {
			// BUG FIX: Text.getBytes() returns the (possibly oversized)
			// backing array, so new String(key.getBytes()) could append stale
			// trailing bytes to the path. toString() decodes only the valid
			// range.
			String filename = key.toString();

			for (BytesWritable val : values) {
				// BUG FIX: same backing-array pitfall for BytesWritable —
				// decode only the first getLength() bytes, not the whole
				// padded buffer.
				int len = val.getLength();
				String text = new String(val.getBytes(), 0, len);

				FileWriter filewriter = new FileWriter(filename, true);
				try {
					// Drop the extra trailing '\n' the mapper appended after
					// each file's contents.
					filewriter.write(text, 0, text.length() - 1);
					filewriter.flush();
				} finally {
					// Close in finally so a failed write does not leak the
					// file handle (the original leaked on exception).
					filewriter.close();
				}
			}
		}

	}

	/**
	 * Configures and runs the job against three fixed input list files. An
	 * optional single argument overrides the reducer count.
	 */
	public static void main(String[] args) throws Exception {
		String inputFilePath1 = "/globalhome/chaosun/hadooptest/dummy/inputfiles1.txt";
		String inputFilePath2 = "/globalhome/chaosun/hadooptest/dummy/inputfiles2.txt";
		String inputFilePath3 = "/globalhome/chaosun/hadooptest/dummy/inputfiles3.txt";
		String outputFilePath = "/globalhome/chaosun/hadooptest/dummy/outputfiles";
		int numberOfReducer = 1;

		Configuration conf = new Configuration();
		String[] otherArgs = new GenericOptionsParser(conf, args)
				.getRemainingArgs();
		// Validate before parsing (the original parsed otherArgs[0] first and
		// only then rejected the invocation); the usage text now matches the
		// single argument actually read.
		if (otherArgs.length > 1) {
			System.err.println("Usage: dummySingle [<reduce num>]");
			System.exit(1);
		}
		if (otherArgs.length > 0) {
			numberOfReducer = Integer.parseInt(otherArgs[0]);
			System.out.println(numberOfReducer + " reducers");
		}

		Job job = new Job(conf, "dummy single");
		job.setJarByClass(dummySingle.class);
		job.setMapperClass(dummyMapper.class);
		// BUG FIX: dummyReducer was also registered as the combiner. The
		// combiner runs on the map side, and dummyReducer writes local files
		// without emitting anything to the context — so every map output
		// record was silently discarded before the reduce phase, and the
		// files were written on whichever node ran the map task. A
		// side-effecting, non-emitting reducer must never be a combiner.
		job.setReducerClass(dummyReducer.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(BytesWritable.class);
		job.setNumReduceTasks(numberOfReducer);

		FileInputFormat.addInputPath(job, new Path(inputFilePath1));
		FileInputFormat.addInputPath(job, new Path(inputFilePath2));
		FileInputFormat.addInputPath(job, new Path(inputFilePath3));
		FileOutputFormat.setOutputPath(job, new Path(outputFilePath));

		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}