package com.song.hadoop01;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;


/**
 * Classic Hadoop MapReduce word-count driver.
 *
 * <p>Extends {@link Configured} so the configuration injected by
 * {@link ToolRunner} (via {@code setConf}) is available through
 * {@code getConf()}, and implements {@link Tool} so generic Hadoop
 * options ({@code -D}, {@code -files}, ...) are parsed before
 * {@link #run} is invoked.
 */
public class WordCount extends Configured implements Tool {

	/**
	 * Mapper: splits each input line into whitespace-delimited tokens and
	 * emits {@code (token, 1)} for every token.
	 */
	public static class WordCountMap extends Mapper<LongWritable, Text, Text, IntWritable> {

		// Output objects are reused across records to avoid per-record allocation.
		private final IntWritable one = new IntWritable(1);
		private final Text word = new Text();

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			StringTokenizer tokens = new StringTokenizer(value.toString());
			while (tokens.hasMoreTokens()) {
				word.set(tokens.nextToken());
				context.write(word, one);
			}
		}
	}

	/**
	 * Reducer: sums all counts for a word and emits {@code (word, total)}.
	 *
	 * <p>NOTE(review): the lowercase class name violates Java naming
	 * conventions but is kept as-is because the class is public and may be
	 * referenced by external callers.
	 */
	public static class wordCountReduce extends Reducer<Text, IntWritable, Text, IntWritable> {

		@Override
		protected void reduce(Text key, Iterable<IntWritable> values, Context context)
				throws IOException, InterruptedException {
			int sum = 0;
			for (IntWritable val : values) {
				sum += val.get();
			}
			context.write(key, new IntWritable(sum));
		}
	}

	/**
	 * Entry point. {@code args[0]} = input path, {@code args[1]} = output path
	 * (after generic-option parsing by {@link ToolRunner}).
	 * Exits with the code returned by {@link #run} — 0 on success.
	 */
	public static void main(String[] args) {
		try {
			// BUGFIX: propagate the job result as the process exit code
			// instead of discarding it.
			System.exit(ToolRunner.run(new WordCount(), args));
		} catch (Exception e) {
			e.printStackTrace();
			// BUGFIX: a failed launch must not exit 0.
			System.exit(1);
		}
	}

	/**
	 * Configures and submits the word-count job, blocking until completion.
	 *
	 * @param args {@code args[0]} input path, {@code args[1]} output path
	 * @return 0 if the job succeeded, 1 otherwise (Unix exit-code convention)
	 * @throws Exception if job submission or execution fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		if (args.length < 2) {
			System.err.println("Usage: WordCount <input path> <output path>");
			return 1;
		}
		// BUGFIX: use the configuration injected by ToolRunner (that is the
		// point of extending Configured) instead of a fresh Configuration,
		// so -D/-files generic options take effect. Job.getInstance replaces
		// the deprecated new Job(conf) constructor.
		Job job = Job.getInstance(getConf());
		job.setJarByClass(WordCount.class);
		job.setJobName("word count");

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(IntWritable.class);

		job.setMapperClass(WordCountMap.class);
		job.setReducerClass(wordCountReduce.class);
		// Combiner is safe here because integer summation is associative and
		// commutative; it reduces shuffle traffic at no correctness cost.
		job.setCombinerClass(wordCountReduce.class);

		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);

		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));

		// BUGFIX: the original returned 1 on success and 0 on failure,
		// inverting the Unix exit-code convention expected by Tool.run.
		return job.waitForCompletion(true) ? 0 : 1;
	}
}
