package cn.hyxy.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class Demo07_Resort_CountSort extends Configured implements Tool{

	/**
	 * Entry point. Delegates to {@link ToolRunner} so Hadoop generic options
	 * ({@code -D}, {@code -files}, ...) are parsed into the Configuration
	 * before {@link #run(String[])} is invoked.
	 */
	public static void main(String[] args) throws Exception {
		int code = ToolRunner.run(new Demo07_Resort_CountSort(), args);
		System.exit(code);
	}

	/**
	 * Configures and submits the "sort by count" job: re-keys each
	 * (word, file, count) record by its count so the shuffle orders output
	 * by count.
	 *
	 * @param args args[0] = input path, args[1] = output path
	 * @return 0 on job success, 1 on job failure, -1 on bad usage
	 */
	@Override
	public int run(String[] args) throws Exception {
		if (args.length != 2) {
			// Usage errors belong on stderr, not mixed into piped stdout.
			System.err.println("Usage: Demo07_Resort_CountSort <input path> <output path>");
			return -1;
		}

		// FIX: use the Configuration injected by ToolRunner (via Configured)
		// instead of a fresh one; otherwise generic options like -D are
		// silently discarded, defeating the purpose of implementing Tool.
		Configuration config = getConf();

		// Delete a pre-existing output directory up front; otherwise the job
		// would fail with "output directory already exists".
		Path outputPath = new Path(args[1]);
		FileSystem fs = FileSystem.get(config);
		if (fs.exists(outputPath)) {
			fs.delete(outputPath, true);
		}

		Job job = Job.getInstance(config, "倒排索引-按字符多少排序");
		job.setJarByClass(getClass());

		job.setMapperClass(MyMapper.class);
		job.setMapOutputKeyClass(LongWritable.class);
		job.setMapOutputValueClass(Text.class);

		job.setReducerClass(MyReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(LongWritable.class);

		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, outputPath);

		return job.waitForCompletion(true) ? 0 : 1;
	}


	/**
	 * Emits each record keyed by its count (3rd whitespace-separated column)
	 * so the shuffle sorts by count; the value keeps the first two columns
	 * joined by a tab. Expected input line shape: {@code word \t file \t count}
	 * — presumably the output of the preceding inverted-index job (TODO confirm).
	 */
	public static class MyMapper extends Mapper<LongWritable, Text, LongWritable, Text>{
		// Reused across map() calls to avoid per-record allocation.
		private final Text outValue = new Text();
		private final LongWritable outKey = new LongWritable(1);

		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, LongWritable, Text>.Context context)
				throws IOException, InterruptedException {
			String[] fields = value.toString().split("\\s+");
			// FIX: skip blank/short lines instead of throwing
			// ArrayIndexOutOfBoundsException and failing the whole task.
			if (fields.length < 3) {
				return;
			}
			long count;
			try {
				count = Long.parseLong(fields[2]);
			} catch (NumberFormatException e) {
				// FIX: a non-numeric count column previously killed the task;
				// treat it as a malformed record and drop it.
				return;
			}
			outValue.set(fields[0] + "\t" + fields[1]);
			outKey.set(count);
			context.write(outKey, outValue);
		}
	}

	/**
	 * Swaps key and value back: emits {@code (word \t file, count)}, now
	 * ordered by count thanks to the shuffle's sort on the LongWritable key.
	 */
	public static class MyReducer extends Reducer<LongWritable, Text, Text, LongWritable>{
		@Override
		protected void reduce(LongWritable countKey, Iterable<Text> records,
				Reducer<LongWritable, Text, Text, LongWritable>.Context context) throws IOException, InterruptedException {
			for (Text record : records) {
				context.write(record, countKey);
			}
		}
	}

}
