package common_aggregation;


import Utils.FileUtil;
import data_manipulate.TopOnRNG;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/*
file preparation (input format, one whitespace-separated record per line):
name subject score
zhangsan chinese 89
lisi math 94
wangwu science 90
zhaoliu science 92
maqi history 89
chenba geography 78
lisi chinese 75
lisi science 96
zhaoliu history 80
zhangsan math 92
chenba history 83
chenba math 84
wangwu chinese 99
wangwu math 89
maqi chinese 90
maqi math 88

task: calculate the average score of each student
 */
public class Avg {

    public static String inputPath = "hdfs://localhost:9000/agg/score";
    public static String outputPath = "hdfs://localhost:9000/agg/avg_output";

    public static void main(String[] args)
            throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        conf.setInt("top", 5);
        FileUtil fileUtil = new FileUtil(conf, outputPath);
        fileUtil.doError(inputPath);
        fileUtil.doDelete(outputPath);

        Job job = Job.getInstance(conf, "avg");
        job.setJarByClass(Avg.class);

        job.setMapperClass(doMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        job.setReducerClass(doReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.setInputPaths(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    public static class doMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context)
                throws IOException, InterruptedException {
            String[] splits = value.toString().trim().split(" ");
            // name subject score
            Text name = new Text(splits[0]);
            IntWritable score = new IntWritable(Integer.parseInt(splits[2]));
            context.write(name, score);
        }
    }

    public static class doReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        int sum;
        int cnt;

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Reducer<Text, IntWritable, Text, IntWritable>.Context context)
                throws IOException, InterruptedException {
            sum = 0;
            cnt = 0;
            for (IntWritable value : values) {
                cnt += 1;
                sum += value.get();
            }
            context.write(key, new IntWritable(sum / cnt));
        }
    }
}
