package com.lvw.hadoop.mapreduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class WordCount2App {

    /**
     * Mapper: emits {@code (word, 1)} for every whitespace-separated token
     * of an input line.
     *
     * <p>Input key is the byte offset of the line (unused); input value is
     * one line of text. Output key is a single word; output value is the
     * constant count 1.
     */
    public static class MyMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

        // The emitted count is always 1; reuse a single immutable-in-practice instance.
        private static final LongWritable ONE = new LongWritable(1);

        // Reused output key to avoid allocating a new Text per emitted word.
        private final Text word = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

            // Split on any run of whitespace. The original split(" ") produced
            // empty tokens for consecutive spaces and did not handle tabs.
            String[] tokens = value.toString().split("\\s+");

            for (String token : tokens) {
                // A line starting with whitespace yields one leading empty token.
                if (token.isEmpty()) {
                    continue;
                }
                word.set(token);
                context.write(word, ONE);
            }
        }
    }

    /**
     * Reducer: sums the per-word counts produced by the mapper and emits
     * {@code (word, totalCount)}.
     */
    public static class MyReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
        @Override
        protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
            // Accumulate all partial counts for this word.
            long total = 0L;
            for (LongWritable count : values) {
                total += count.get();
            }

            // Emit the word alongside its aggregated occurrence count.
            context.write(key, new LongWritable(total));
        }
    }


    /**
     * Driver: configures and submits the word-count job.
     *
     * <p>Usage: {@code WordCount2App <input path> <output path>}. The output
     * directory is deleted first if it already exists (Hadoop refuses to
     * overwrite an existing output directory). The process exits with status
     * 0 on job success and 1 on failure.
     *
     * @param args args[0] = input path, args[1] = output path
     * @throws Exception on configuration or filesystem errors
     */
    public static void main(String[] args) throws Exception {

        // Guard against missing arguments instead of throwing a raw
        // ArrayIndexOutOfBoundsException below.
        if (args.length < 2) {
            System.err.println("Usage: WordCount2App <input path> <output path>");
            System.exit(2);
        }

        // create config
        Configuration configuration = new Configuration();

        // delete existed dir
        Path outputPath = new Path(args[1]);
        FileSystem fileSystem = FileSystem.get(configuration);
        if (fileSystem.exists(outputPath)) {
            fileSystem.delete(outputPath, true); // true = recursive
            System.out.println("Deleting existed dir.");
        }

        // create job
        Job job = Job.getInstance(configuration, "word count");

        // ship the jar containing this class to the cluster
        job.setJarByClass(WordCount2App.class);

        // set input file
        FileInputFormat.setInputPaths(job, new Path(args[0]));

        // set parameters for map
        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);

        // set parameters for reduce
        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        // set output file (reuse the Path built for the existence check)
        FileOutputFormat.setOutputPath(job, outputPath);

        // Block until the job finishes. The original printed 1 on success and
        // 0 on failure — inverted relative to the exit-code convention — and
        // never set the process exit status at all.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }


}
