package demo2;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import java.io.IOException;

/**
 * MapReduce driver that aggregates mobile data-flow logs by phone number.
 *
 * <p>Input lines are tab-separated records like:
 * {@code 1363157985066  13726230503  00-FD-07-A4-72-B8:CMCC  120.196.100.82  24  27  2481  24681  200}
 * where column 1 is the phone number, column 6 the upload bytes and
 * column 7 the download bytes (0-based indices). The reducer emits, per
 * phone number, the total upload, download, and combined traffic.
 */
public class DataFlowJob {

    public static void main(String[] args) throws Exception {
        // 1. Initialize the configuration, pointing at the HDFS NameNode.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hadoop10:9000");

        // 2. Create the job and locate the jar via this driver class.
        Job job = Job.getInstance(conf);
        job.setJarByClass(DataFlowJob.class);

        // 3. Input/output formats: plain text lines in, text lines out.
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        // 4. Input and output paths on HDFS.
        // NOTE(review): the job fails if the output directory already exists;
        // remove /mapreduce/demo2/out before re-running.
        TextInputFormat.addInputPath(job, new Path("/mapreduce/demo2/phone.log"));
        TextOutputFormat.setOutputPath(job, new Path("/mapreduce/demo2/out"));

        // 5. Mapper and reducer implementations.
        // NOTE(review): "DataDlowReducer" is a typo for "DataFlowReducer";
        // kept as-is to avoid breaking any external references.
        job.setMapperClass(DataFlowMapper.class);
        job.setReducerClass(DataDlowReducer.class);

        // 6. Key/value types for the map output and the final (reduce) output.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(DataFlowBean.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // 7. Run the job synchronously and propagate success/failure as the
        // process exit code (fix: the original always exited 0, so shell
        // scripts could not detect a failed job).
        boolean success = job.waitForCompletion(true);
        System.out.println(success);
        System.exit(success ? 0 : 1);
    }

    /**
     * Maps one tab-separated log line to (phone number, DataFlowBean(up, down)).
     */
    static class DataFlowMapper extends Mapper<LongWritable, Text, Text, DataFlowBean> {
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Input value example:
            // 1363157985066  13726230503  00-FD-07-A4-72-B8:CMCC  120.196.100.82  24  27  2481  24681  200
            String[] arr = value.toString().split("\t");
            // Emit: key = phone number (column 1),
            //       value = DataFlowBean(upload = column 6, download = column 7).
            context.write(new Text(arr[1]), new DataFlowBean(Integer.parseInt(arr[6]), Integer.parseInt(arr[7])));
        }
    }

    /**
     * Sums upload and download traffic per phone number and emits a
     * human-readable summary line.
     * NOTE(review): class name has a typo ("Dlow" should be "Flow"); kept
     * unchanged because the name is part of the class's public identity.
     */
    static class DataDlowReducer extends Reducer<Text, DataFlowBean, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<DataFlowBean> values, Context context) throws IOException, InterruptedException {
            // Input: key = phone number, values = [DataFlowBean, DataFlowBean, ...]
            // Use long accumulators: per-record counts are ints, but the total
            // traffic for a busy number can exceed Integer.MAX_VALUE.
            long totalUp = 0;   // total upload traffic
            long totalDown = 0; // total download traffic
            for (DataFlowBean bean : values) {
                totalUp += bean.getUp();
                totalDown += bean.getDown();
            }
            // Output value format: 上传流量:4962 下载流量:49362 总数据流量: 54324
            // (fix: the original concatenation had no separator between the
            // upload total and the "下载流量" label, gluing the numbers together).
            context.write(key, new Text("上传流量:" + totalUp + " 下载流量:" + totalDown + " 总数据流量: " + (totalUp + totalDown)));
        }
    }
}
