package chapter1.service;

import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;

public class WordCount {
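	/**
	 * Mapper for the classic word-count example (old org.apache.hadoop.mapred API):
	 * splits each input line into whitespace-separated tokens and emits a
	 * (word, 1) pair for every token.
	 */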
	public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable> {
		private static final IntWritable one = new IntWritable(1);
		private final Text word = new Text();

		@Override
		public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter)
				throws IOException {
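			// Called once per input record: with TextInputFormat the key is the byte
			// offset of the line in the file and the value is the line itself.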
			
			String line = value.toString();
			StringTokenizer tokenizer = new StringTokenizer(line);
			while (tokenizer.hasMoreTokens()) {
				word.set(tokenizer.nextToken());
				output.collect(word, one);
			}
		}
		
	}
	
	
	/**
	 * Reducer: receives every count emitted for a given word and writes the word
	 * together with its total number of occurrences.
	 */
	public static class Reduce extends MapReduceBase implements Reducer<Text, IntWritable, Text, IntWritable> {

		@Override
		public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, IntWritable> output, Reporter reporter)
				throws IOException {
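			// Called once per distinct word; values iterates over every count (each
			// a 1) that the mappers emitted for this word.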
			// Sum the counts for this word.
			int sum = 0;
			while (values.hasNext()) {
				sum += values.next().get();
			}
			output.collect(key, new IntWritable(sum));
		}

	}
  
	/**
	 * Configures and runs the word-count job. Paths and cluster settings are
	 * hard-coded rather than taken from the command line.
	 */
	public static void main(String[] args) throws Exception {
		JobConf conf = new JobConf(WordCount.class);
		conf.setJobName("wordcount");

		// Types of the keys and values written by the reducer.
		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(IntWritable.class);

		conf.setMapperClass(Map.class);
		conf.setReducerClass(Reduce.class);
		conf.setInputFormat(TextInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);

		// Uncomment to point the job at a (pseudo-)distributed cluster; left
		// commented out, the job runs with the local job runner and local file system.
//		conf.set("mapred.job.tracker", "localhost:9001");
//		conf.set("fs.default.name", "hdfs://localhost:9000");
		// Input to read; setInputPaths accepts one or more paths.
		FileInputFormat.setInputPaths(conf, new Path("file01"));
		// Output directory for the results; it must not already exist.
		FileOutputFormat.setOutputPath(conf, new Path("file03.txt"));

		// Submit the job and block until it completes.
		JobClient.runJob(conf);
	}

}
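
// Example invocation (a sketch; the jar name "wordcount.jar" is illustrative):
//
//   hadoop jar wordcount.jar chapter1.service.WordCount
//
// With the cluster properties left commented out, the job runs with the local
// job runner, reads "file01" from the working directory, and writes its part
// files into a new "file03.txt" output directory.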
