package com.qingguo.MapReduce;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Job;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * MapReduce job that computes the average salary per job title.
 *
 * <p>Input: CSV lines of the form {@code <name>,<job>,<salary>,...}.
 * Output: one line per job title, {@code <job>\t<averageSalary>} (integer
 * division, truncated toward zero).
 *
 * <p>Usage: {@code avg_sal <hdfs-input-path> <hdfs-output-path>}
 */
public class avg_sal {

    /**
     * Mapper: parses each CSV line and emits (jobTitle, salary).
     *
     * <p>Malformed records (fewer than 3 fields, or a non-numeric salary)
     * are skipped instead of failing the task, so one bad line cannot
     * kill the whole job.
     */
    public static class Mymap extends Mapper
            <LongWritable, Text, Text, LongWritable> {

        @Override
        protected void map(LongWritable k1, Text v1, Context context)
                throws IOException, InterruptedException {
            // Split the incoming CSV record into its fields.
            String[] split = v1.toString().split(",");
            if (split.length < 3) {
                // Blank or truncated line — skip rather than throw
                // ArrayIndexOutOfBoundsException.
                return;
            }
            String job = split[1];
            long salary;
            try {
                salary = Long.parseLong(split[2].trim());
            } catch (NumberFormatException ignored) {
                // Non-numeric salary (e.g. a header row) — skip the record.
                return;
            }
            context.write(new Text(job), new LongWritable(salary));
        }
    }

    //*********************************************************************************************

    /**
     * Reducer: averages all salaries observed for one job title.
     *
     * <p>The average uses integer (long) division, matching the
     * LongWritable output type, so the result is truncated.
     */
    public static class Myreduce extends Reducer
            <Text, LongWritable, Text, LongWritable> {

        @Override
        protected void reduce(Text k2, Iterable<LongWritable> v2s, Context context)
                throws IOException, InterruptedException {
            long sum = 0;
            long count = 0;
            for (LongWritable longWritable : v2s) {
                sum = sum + longWritable.get();
                count++;
            }
            // Hadoop only invokes reduce() for keys with at least one value,
            // so count is always >= 1 here.
            long avg = sum / count;
            context.write(k2, new LongWritable(avg));
        }
    }

    //*****************************************************************************

    /**
     * Job driver: wires the mapper and reducer together and submits the job.
     *
     * @param args args[0] = HDFS input path, args[1] = HDFS output path
     * @throws IOException            on HDFS / job-setup failure
     * @throws ClassNotFoundException if a job class cannot be loaded
     * @throws InterruptedException   if the wait for completion is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // Load the Hadoop configuration (core-site.xml, etc.).
        Configuration conf = new Configuration();
        // Create the job, named after this class.
        Job job = Job.getInstance(conf, avg_sal.class.getName());
        // Tell Hadoop which jar to ship to the cluster.
        job.setJarByClass(avg_sal.class);
        // HDFS input path to read from.
        FileInputFormat.addInputPath(job, new Path(args[0]));
        // Mapper class and its output key/value serialization types.
        job.setMapperClass(Mymap.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        // Reducer class and the job's final output key/value types.
        job.setReducerClass(Myreduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        // HDFS output path to write to (must not already exist).
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        // Submit, wait, print progress — and propagate success/failure
        // as the process exit code (the original always exited 0,
        // hiding job failures from calling scripts).
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

}



