package chen.bupt.mapreduce.board.splitboard;

import java.io.IOException;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;

import chen.bupt.constant.Constants;
import chen.bupt.util.HDFSFileUtils;

/**
 * MapReduce job that splits segmented records into per-board output
 * directories. Input lines have the layout {@code <prefix>;<boardId>;<payload>};
 * the mapper keeps only records whose board ID is known to {@link BoardInfo},
 * and the reducer writes each record to the sub-directory named after its
 * board via {@link MultipleOutputs}.
 */
public class SplitBoardJob {
	/** HDFS input path of the segmented records. */
	private static final String inputPath = Constants.SEG_PATH;
	/** HDFS output root; one sub-directory per board is created under it. */
	private static final String outputPath = Constants.BOARD_DATA;

	/**
	 * Emits (boardId, payload) for every record whose board ID is in the
	 * known-board set; drops malformed lines and unknown boards.
	 */
	public static class SplitBoardMapper extends
			Mapper<LongWritable, Text, Text, Text> {

		// Known board IDs, loaded once per task JVM.
		// NOTE(review): assumes BoardInfo.getBoardID() needs no Hadoop
		// context and is cheap/safe at class-initialization time — confirm.
		private final Set<String> boardIds = BoardInfo.getBoardID();

		// Reused output objects to avoid per-record allocation.
		private final Text outKey = new Text();
		private final Text outValue = new Text();

		@Override
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// Skip records originating from files we are not meant to read.
			if (!HDFSFileUtils.isInputFile(context))
				return;
			String rec = value.toString();
			// Layout: <prefix>;<boardId>;<payload> — strip the prefix first.
			int first = rec.indexOf(';');
			if (first < 0)
				return; // malformed line: no prefix delimiter
			rec = rec.substring(first + 1);
			int second = rec.indexOf(';');
			if (second < 0)
				return; // malformed line: no board/payload delimiter
			String boardId = rec.substring(0, second);
			if (!boardIds.contains(boardId))
				return; // unknown board: drop the record
			outKey.set(boardId);
			outValue.set(rec.substring(second + 1));
			context.write(outKey, outValue);
		}
	}

	/**
	 * Writes every value as one line {@code "<boardId>;<payload>"} into the
	 * per-board output directory {@code "<boardId>/"} via
	 * {@link MultipleOutputs}; nothing is written through the normal
	 * reducer output channel.
	 */
	public static class SplitBoardReducer extends
			Reducer<Text, Text, Text, NullWritable> {

		private MultipleOutputs<Text, NullWritable> mos;

		// Reused output object to avoid per-record allocation.
		private final Text outLine = new Text();

		@Override
		protected void setup(Context context) throws IOException,
				InterruptedException {
			mos = new MultipleOutputs<Text, NullWritable>(context);
		}

		@Override
		protected void cleanup(Context context) throws IOException,
				InterruptedException {
			// Guard against setup() having failed before mos was assigned.
			if (mos != null) {
				mos.close();
			}
		}

		@Override
		protected void reduce(Text key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			String boardId = key.toString();
			// Base output path is loop-invariant; compute it once per key.
			String basePath = boardId + "/";
			for (Text val : values) {
				outLine.set(boardId + ";" + val.toString());
				mos.write(outLine, NullWritable.get(), basePath);
			}
		}
	}

	/**
	 * Configures and runs the {@code split_board} job: reads segmented
	 * records from {@code Constants.SEG_PATH} and splits them into
	 * per-board files under {@code Constants.BOARD_DATA}.
	 *
	 * @param args unused
	 * @throws Exception if job configuration or execution fails
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		Job job = new Job(conf, "split_board");
		Path in = new Path(inputPath);
		Path out = new Path(outputPath);
		// Remove stale output so the job does not fail on an existing dir.
		HDFSFileUtils.deleteFile(out, conf);
		FileInputFormat.setInputPaths(job, in);
		FileOutputFormat.setOutputPath(job, out);
		job.setJarByClass(SplitBoardJob.class);
		job.setMapperClass(SplitBoardMapper.class);
		job.setReducerClass(SplitBoardReducer.class);
		// Map emits Text/Text; the reducer's declared output types are
		// Text/NullWritable (all real output goes through MultipleOutputs).
		// Declare both explicitly instead of letting the map output value
		// class fall back to a job output value class that contradicts the
		// reducer signature.
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(NullWritable.class);
		// Propagate the job result instead of always exiting 0.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}
