package com.qst.mapreduce.wordcount.zuoyemywc.zajiaoban;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

import org.apache.hadoop.mapreduce.Reducer;

public class SanHeYiWordCountDriver {

    public class MyWordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context) throws IOException, InterruptedException {
            //super.map(key, value, context);
            System.out.println(key);
            System.out.println(value); //会打印出来Map阶段输入的key值和对应的value值
            String line=value.toString(); //数据的转化
            String[] words=line.split(" ");//拆分成数组
            System.out.println(words);//会打印出来数据word的内容
            // 把一个个单词当成key，把1当成value
            // for循环遍历数组，把word一个个输出
            for (String word:words) {
                Text outkey=new Text(word);
                //outkey.set(word);
                IntWritable outvalue=new IntWritable(1);
                context.write(outkey, outvalue);
            }






        }
    }



    public class MyWordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override // CTRL+o 生成重写方法
        protected void reduce(Text key, Iterable<IntWritable> values, Reducer<Text, IntWritable, Text, IntWritable>.Context context) throws IOException, InterruptedException {
            //super.reduce(key, values, context);
            System.out.println(key);
            int total=0;
            for (IntWritable value : values){
                System.out.println(value);
                int count=value.get();//类型转换，获取value的值
                total+=count;
            }
            context.write(key, new IntWritable(total));
        }
    }


    /**
     * Configures and submits the word-count job, blocking until it finishes.
     *
     * @param args optional paths: {@code args[0]} = input path, {@code args[1]} =
     *             output path. When absent, falls back to the hard-coded local
     *             development paths so existing invocations keep working.
     * @throws IOException            on HDFS / job-setup failure
     * @throws ClassNotFoundException if the mapper/reducer classes cannot be loaded
     * @throws InterruptedException   if the submitting thread is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {

        // 1. Build the job from a fresh configuration.
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);

        // 2. Tell Hadoop which jar to ship to the cluster.
        job.setJarByClass(SanHeYiWordCountDriver.class);

        // 3. Wire up the map and reduce implementations.
        job.setMapperClass(MyWordCountMapper.class);
        job.setReducerClass(MyWordCountReducer.class);

        // 4. Mapper output key/value types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // 5. Final (reducer) output key/value types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // 6. Prefer command-line paths so the same jar runs on a cluster;
        //    fall back to the original hard-coded development paths.
        String inputPath = args.length > 0
                ? args[0]
                : "E:\\changyongruanjian\\dashujuJava\\JavaAPPneirong\\shujukucode\\MapReduceDemo\\MapReduceDemo\\input\\word.txt";
        String outputPath = args.length > 1
                ? args[1]
                : "E:\\changyongruanjian\\dashujuJava\\JavaAPPneirong\\shujukucode\\MapReduceDemo\\MapReduceDemo\\output";
        FileInputFormat.setInputPaths(job, new Path(inputPath));
        // NOTE(review): the output directory must not already exist, or the job
        // fails with FileAlreadyExistsException — delete it between runs.
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        // 7. Submit and wait; exit 0 on success, 1 on failure.
        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }
}
