package com.shujia.mr.avgcount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/*
    练习题：求每个人每一天平均消费,如果遇到异常数据，money异常，就回填为10
    数据：
    张三,12月3号,20
    张三,12月3号,10
    李四,12月3号,12
    王五,12月3号,10
    王五,12月2号
    王五,12月2号,30
    王二麻,12月2号,0
    王二麻,12月2号,,
    王二麻,12月2号, ,
 */
/**
 * Mapper: parses one CSV spend record and emits (person-day, money).
 *
 * Input line shapes (see the exercise header):
 *   name,date,money   -> emit money (backfill default if money is blank/non-numeric)
 *   name,date         -> money column missing, backfill the default
 *   name,date,money,  -> trailing comma(s) yield 4 fields, treated as abnormal, backfill
 * Any other field count is silently dropped as malformed (matches original behavior).
 *
 * Key  : Text        "name-date", e.g. "张三-12月3号"
 * Value: LongWritable spend amount for that record
 */
class CountMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
    // Spec: abnormal money values are backfilled with 10.
    private static final long DEFAULT_MONEY = 10L;

    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, LongWritable>.Context context) throws IOException, InterruptedException {
        String line = value.toString();
        // limit -1 keeps trailing empty fields, so "a,b,," splits into 4 parts
        String[] infos = line.split(",", -1);
        // Only 2-, 3- and 4-field records are meaningful; drop anything else.
        if (infos.length < 2 || infos.length > 4) {
            return;
        }
        long money = DEFAULT_MONEY;
        if (infos.length == 3) {
            try {
                // trim tolerates stray spaces such as "name,date, 20"
                money = Long.parseLong(infos[2].trim());
            } catch (NumberFormatException e) {
                // BUGFIX: the original parsed unguarded and a blank/non-numeric
                // money field (e.g. "王五,12月2号, ") crashed the map task.
                // Per the spec, abnormal money is backfilled instead.
                money = DEFAULT_MONEY;
            }
        }
        context.write(new Text(infos[0] + "-" + infos[1]), new LongWritable(money));
    }
}

/**
 * Reducer: collapses all spend amounts for one person-day key into their
 * integer (truncated) average.
 *
 * Key  : Text        "name-date" produced by {@link CountMapper}
 * Value: LongWritable average spend, sum / count with integer division
 */
class CountReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
    @Override
    protected void reduce(Text key, Iterable<LongWritable> values, Reducer<Text, LongWritable, Text, LongWritable>.Context context) throws IOException, InterruptedException {
        long total = 0L;
        long entries = 0L;
        for (LongWritable spend : values) {
            total += spend.get();
            entries += 1L;
        }
        // reduce() is only invoked when at least one value exists, so
        // entries >= 1 and the division cannot throw. Truncating integer
        // division is intentional — the output type is LongWritable.
        context.write(key, new LongWritable(total / entries));
    }
}

/**
 * Driver: wires {@link CountMapper} and {@link CountReducer} into a job that
 * computes each person's daily average spend.
 *
 * args[0] — HDFS input path; args[1] — HDFS output path (must not exist).
 */
public class CountDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master:9000");

        Job job = Job.getInstance(conf);
        // Number of reduce tasks; uncomment to raise reduce-side parallelism.
//        job.setNumReduceTasks(4);
        job.setJarByClass(CountDemo.class);
        job.setJobName("用户日平均消费");

        job.setMapperClass(CountMapper.class);
        job.setReducerClass(CountReducer.class);

        // Map output and final output share the same key/value types here.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Block until the job finishes; report the outcome on stdout.
        boolean succeeded = job.waitForCompletion(true);
        System.out.println(succeeded
                ? "32期 用户日平均消费mapreduce实现执行成功！>_-"
                : "32期 用户日平均消费mapreduce实现执行失败！T_T");
    }
}
