package com.mapreduce;

import com.bean.AvgTemp;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;

import java.io.IOException;
import java.net.URI;

public class JobNumSum extends Configured implements Tool {

    /**
     * Mapper: receives each input line already split by KeyValueTextInputFormat
     * into (key, value) on the configured ',' separator. Emits
     * {@code (key, AvgTemp(1, parsedValue))} so the reducer can accumulate both
     * an occurrence count and a value total in a single pass.
     * NOTE(review): the value is parsed as an int temperature reading —
     * non-numeric values will throw NumberFormatException and fail the task.
     */
    static class JobNumSumMapper extends
            Mapper<Text, Text, Text, AvgTemp> {
        @Override
        protected void map(Text key, Text value,
                Mapper<Text, Text, Text, AvgTemp>.Context context)
                throws IOException, InterruptedException {
            // Count of 1 plus the parsed reading: reducer sums both fields.
            AvgTemp avgTemp = new AvgTemp(1, Integer.parseInt(value.toString()));
            context.write(key, avgTemp);
        }
    }

    /**
     * Reducer: for each key, sums the per-record counts and value totals into a
     * single {@code AvgTemp(num, sum)} record, written to a SequenceFile.
     * Because this count/sum accumulation is associative and commutative, the
     * same class could also serve as a map-side combiner (it runs on the mapper's
     * machine and pre-aggregates before the shuffle), as the original comment noted.
     */
    static class JobNumSumReducer extends
            Reducer<Text, AvgTemp, Text, AvgTemp> {
        @Override
        protected void reduce(Text key, Iterable<AvgTemp> values,
                Reducer<Text, AvgTemp, Text, AvgTemp>.Context context)
                throws IOException, InterruptedException {
            int num = 0; // total number of observations for this key
            int sum = 0; // total of all observed values for this key
            for (AvgTemp val : values) {
                num += val.getNum();
                sum += val.getSum();
            }
            context.write(key, new AvgTemp(num, sum));
        }
    }

    /** Default HDFS locations used when no paths are supplied on the command line. */
    private static final String DEFAULT_INPUT = "hdfs://192.168.10.11:9000/sortdata";
    private static final String DEFAULT_OUTPUT = "hdfs://192.168.10.11:9000/snoutput";

    /**
     * Configures and runs the count/sum aggregation job.
     *
     * @param args optional: {@code args[0]} = input path, {@code args[1]} = output
     *             path; falls back to the original hard-coded HDFS defaults when
     *             absent, so existing invocations keep working.
     * @return 0 on success, -1 if the job fails
     * @throws Exception if HDFS access or job submission fails
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        // Input lines are "key,value": tell KeyValueTextInputFormat to split on ','.
        conf.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", ",");

        // Use command-line paths when given; keep the old defaults otherwise.
        Path input1 = new Path(args.length > 0 ? args[0] : DEFAULT_INPUT);
        Path output1 = new Path(args.length > 1 ? args[1] : DEFAULT_OUTPUT);

        // Resolve the filesystem from the output path itself instead of a second,
        // duplicated hard-coded URI, so non-default paths work too.
        FileSystem fs = FileSystem.get(output1.toUri(), conf);
        // Hadoop refuses to start if the output directory already exists.
        if (fs.exists(output1)) {
            fs.delete(output1, true);
        }

        // Build the job.
        Job job1 = Job.getInstance(conf);
        job1.setJobName("avg");
        job1.setJarByClass(this.getClass());
        // Map stage configuration.
        job1.setMapperClass(JobNumSumMapper.class);
        job1.setMapOutputKeyClass(Text.class);
        job1.setMapOutputValueClass(AvgTemp.class);
        // Reduce stage configuration.
        job1.setReducerClass(JobNumSumReducer.class);
        job1.setOutputKeyClass(Text.class);
        job1.setOutputValueClass(AvgTemp.class);

        job1.setInputFormatClass(KeyValueTextInputFormat.class);
        KeyValueTextInputFormat.addInputPath(job1, input1);

        // Write a SequenceFile so a downstream job can read (Text, AvgTemp) directly.
        job1.setOutputFormatClass(SequenceFileOutputFormat.class);
        SequenceFileOutputFormat.setOutputPath(job1, output1);
        return job1.waitForCompletion(true) ? 0 : -1;
    }
}
