package cn.hyxy.day03;


import java.io.IOException;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

//1:继承两个类
//1: extend Configured and implement Tool so ToolRunner can inject the Hadoop Configuration
public class Demo01_WordCountMR extends Configured implements Tool {

	/**
	 * Entry point. Delegates to {@link ToolRunner} so that generic Hadoop
	 * options (-D, -files, -libjars, ...) are parsed before {@link #run} runs.
	 */
	public static void main(String[] args) throws Exception {
		int code = ToolRunner.run(new Demo01_WordCountMR(), args);
		System.exit(code);
	}

	/**
	 * Configures and submits the word-count job, blocking until it finishes.
	 *
	 * @param args args[0] = input path, args[1] = output path (must not already exist)
	 * @return 0 on success, 1 on job failure, -1 on bad usage
	 * @throws Exception if job setup or submission fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		if (args.length != 2) {
			// Tell the operator exactly what is expected instead of a placeholder message.
			System.err.println("Usage: Demo01_WordCountMR <input path> <output path>");
			return -1;
		}
		Configuration configuration = getConf();
		Job job = Job.getInstance(configuration, "统计字符");

		//9: locate the jar containing this class (maven package, upload, run on Linux)
		job.setJarByClass(Demo01_WordCountMR.class);
//		job.setJar("./target/hadoop-2.7.6-0.0.1-SNAPSHOT.jar");   // local jar, HDFS data, local submit
		//10: Mapper and its output key/value types
		job.setMapperClass(WordCountMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(IntWritable.class);
		//11: Reducer and the final output key/value types
		job.setReducerClass(WordCountReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(IntWritable.class);
		//12: input/output paths taken from the command line
//		FileInputFormat.addInputPath(job, new Path("/text1/a.txt"));
		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		//13: submit and wait; map success/failure to an exit code
		boolean b = job.waitForCompletion(true);
		return b ? 0 : 1;
	}

	//2: Mapper — one call per input line; emits (word, 1) for every whitespace-separated token
	public static class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

		// Reuse Writable instances across map() calls: Hadoop serializes the
		// value immediately on write(), so per-record allocation is pure GC churn.
		private final Text word = new Text();
		private static final IntWritable ONE = new IntWritable(1);

		//3: override map()
		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context)
				throws IOException, InterruptedException {

			//4: split the line on runs of whitespace (\s+ = one or more whitespace chars)
			String[] strs = value.toString().split("\\s+");
			for (String string : strs) {
				// split() yields a leading "" when the line starts with whitespace;
				// skip blanks so the empty string is never counted as a word.
				if (string.isEmpty()) {
					continue;
				}
				word.set(string);
				context.write(word, ONE);
			}
		}
	}

	//5: Reducer — receives (word, [1, 1, ...]) and emits (word, total count)
	//key3=key2, value3=value2 ; output is (k4, v4)
	public static class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

		// Reused output value; set() before each write avoids per-key allocation.
		private final IntWritable result = new IntWritable();

		//6: override reduce()
		@Override
		protected void reduce(Text key3, Iterable<IntWritable> value3,
				Reducer<Text, IntWritable, Text, IntWritable>.Context context) throws IOException, InterruptedException {
			// Sum all the 1s emitted by the mappers (and any combiner output) for this word.
			int sum = 0;
			for (IntWritable intWritable : value3) {
				sum += intWritable.get();
			}
			// Emit the final (word, count) pair.
			result.set(sum);
			context.write(key3, result);
		}
	}
}
