package hadoop.hdfs.demo;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * Word-count driver for Hadoop MapReduce.
 *
 * <p>Run from a jar with: {@code hadoop jar <jarname> <mainClass> [input] [output]}.
 * The main class must be given by its fully qualified name.
 *
 * @author qiss
 * @create 2018-02-12 14:23
 **/
public class WordCountRunner {

    /** Default HDFS input path, used when no command-line arguments are supplied. */
    private static final String DEFAULT_INPUT = "hdfs://hadoop-namenode1:9000/README.txt";
    /** Default HDFS output path, used when no command-line arguments are supplied. */
    private static final String DEFAULT_OUTPUT = "hdfs://hadoop-namenode1:9000/demo/wdcount/output";

    /**
     * Configures and submits the word-count job, then exits with status 0 on
     * success or 1 on failure.
     *
     * @param args optional overrides: args[0] = input path, args[1] = output path;
     *             the original hard-coded paths are used as defaults otherwise
     * @throws Exception if job setup or submission fails
     */
    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        Job job = Job.getInstance(configuration);

        // Locate the jar containing this class instead of hard-coding a jar path.
        job.setJarByClass(WordCountRunner.class);

        job.setMapperClass(WordCountMapper.class);
        // The reduce function is associative and commutative, so it can also run
        // as a combiner to shrink the intermediate data shuffled over the network.
        job.setCombinerClass(WordCountReduce.class);
        job.setReducerClass(WordCountReduce.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // Paths may be overridden on the command line; fall back to the defaults
        // so existing invocations without arguments keep working.
        String input = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String output = args.length > 1 ? args[1] : DEFAULT_OUTPUT;
        FileInputFormat.setInputPaths(job, input);
        FileOutputFormat.setOutputPath(job, new Path(output));

        boolean succeeded = job.waitForCompletion(true);
        System.exit(succeeded ? 0 : 1);
    }

    // Must be static: Hadoop instantiates mapper classes reflectively and cannot
    // construct a non-static inner class (no enclosing instance is available).
    static class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        // Custom counters declared via an enum (counter-API demonstration only;
        // no actual malformed-record detection is implemented here).
        enum MyCounter { MALFORMED, NORMAL }

        // Reused Writable instances: avoids allocating a fresh object per token,
        // the standard Hadoop idiom for hot map() paths.
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        /**
         * Tokenizes one input line and emits (token, 1) for each non-empty token.
         *
         * @param key     byte offset of the line within the input split (unused)
         * @param value   the line of text
         * @param context Hadoop context used to emit pairs and update counters
         */
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split on runs of whitespace so tabs / repeated spaces do not
            // produce empty tokens (a plain split(" ") would).
            String[] tokens = value.toString().split("\\s+");
            // Increment the enum-declared custom counter once per record.
            context.getCounter(MyCounter.NORMAL).increment(1);
            // Increment a dynamically named custom counter (group/name API demo).
            context.getCounter("counterGroupa", "countera").increment(1);
            for (String token : tokens) {
                if (token.isEmpty()) {
                    continue; // leading whitespace can still yield one empty token
                }
                word.set(token);
                context.write(word, ONE);
            }
        }
    }

    static class WordCountReduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        // Reused output value — one allocation for the whole task instead of one per key.
        private final IntWritable result = new IntWritable();

        /**
         * Sums the partial counts for one word (emitted by mappers and the
         * combiner) and writes the total.
         *
         * @param key     the word
         * @param values  partial counts for that word
         * @param context Hadoop context used to emit the (word, total) pair
         */
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int count = 0;
            for (IntWritable value : values) {
                count += value.get();
            }
            result.set(count);
            context.write(key, result);
        }
    }
}
