package com.chenbingkang.bigdata.homework1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.Iterator;

public class Application {

    public static class HadoopMap extends Mapper<Object, Text, Text, FlowBean> {
        public void map(Object key, Text text, Mapper<Object, Text, Text, FlowBean>.Context context) throws IOException, InterruptedException {
            String value = text.toString();
            String[] arr = value.split("\\s+");
            String phone = arr[1];
            int length = arr.length;
            long upFlow = Long.parseLong(arr[length - 3]);
            long downFlow = Long.parseLong(arr[length - 2]);
            long sumFlow = upFlow + downFlow;
            context.write(new Text(phone), new FlowBean(upFlow, downFlow, sumFlow));

        }
    }

    public static class HadoopReduce extends Reducer<Text, FlowBean, Text, FlowBean> {
        private FlowBean flowBean = new FlowBean();

        public void reduce(Text key, Iterable<FlowBean> iterable, Reducer<Text, FlowBean, Text, FlowBean>.Context context) throws IOException, InterruptedException {
            Iterator<FlowBean> iterator = iterable.iterator();
            while (iterator.hasNext()) {
                FlowBean tmp = iterator.next();
                flowBean.addUpFlow(tmp.getUpFlow());
                flowBean.addDownFlow(tmp.getDownFlow());
                flowBean.addSumFlow(tmp.getSumFlow());
            }
            context.write(key, flowBean);
        }
    }

    /**
     * Configures and submits the per-phone flow aggregation job.
     *
     * @param args optional overrides: args[0] = input path, args[1] = output
     *             path; the original hard-coded HDFS directories remain the
     *             defaults, so existing invocations are unaffected.
     * @throws Exception if job setup or submission fails
     */
    public static void main(String... args) throws Exception {
        Configuration conf = new Configuration();
        // Job name corrected: this job sums flow records, it is not WordCount.
        Job job = Job.getInstance(conf, "FlowCount");
        job.setNumReduceTasks(1);
        job.setJarByClass(Application.class);
        job.setMapperClass(Application.HadoopMap.class);
        job.setReducerClass(Application.HadoopReduce.class);
        // Mapper and reducer both emit (Text, FlowBean), so one output-class
        // pair covers the map and reduce stages.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);
        String inputPath = args.length > 0 ? args[0] : "/user/student/chenbingkang/input/";
        String outputPath = args.length > 1 ? args[1] : "/user/student/chenbingkang/output/";
        FileInputFormat.addInputPath(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

}
