import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Iterator;

/**
 * @ClassName TimeReduce
 * @Author 真正的小明被占用了
 * @Date 2018/12/16/016 20:13
 * @Version 1.0
 */
public class TimeReduce implements Reducer<Text, LongWritable, Text, Text> {
    // NOTE(review): the original also extended the raw new-API
    // org.apache.hadoop.mapreduce.Reducer; mixing the mapred and mapreduce APIs
    // is incorrect — old-API jobs only require the Reducer interface below.

    /** Per-weekday ("1".."7") running totals, one slot per hour of day (index 0-23). */
    private final Hashtable<String, ArrayList<Long>> hashtable;

    public TimeReduce() {
        hashtable = new Hashtable<>();
        for (int day = 1; day <= 7; day++) {
            hashtable.put(String.valueOf(day), zeroFilledHours());
        }
    }

    /**
     * Builds a list of 24 zero counters, one per hour.
     * The original passed 24 to the ArrayList constructor, which only sets the
     * *capacity* — the list stayed empty, so set(hour, ...) in reduce() threw
     * IndexOutOfBoundsException. The list must actually contain 24 elements.
     */
    private static ArrayList<Long> zeroFilledHours() {
        ArrayList<Long> hours = new ArrayList<>(24);
        for (int h = 0; h < 24; h++) {
            hours.add(0L);
        }
        return hours;
    }

    /**
     * Accumulates the total count for each (week, hour) key and emits the
     * current per-hour totals for that weekday.
     *
     * @param text            map-side key, formatted "week,hour" (e.g. "3,14")
     * @param iterator        all counts emitted for this key
     * @param outputCollector sink for (key, per-weekday hour totals) pairs
     * @param reporter        progress reporter (unused)
     * @throws IOException if the collector fails
     */
    @Override
    public void reduce(Text text, Iterator<LongWritable> iterator, OutputCollector<Text, Text> outputCollector, Reporter reporter) throws IOException {
        String key = text.toString();          // key produced by the mapper: week + "," + hour
        String[] parts = key.split(",");
        String week = parts[0];                // day of week, "1".."7"
        int hour = Integer.parseInt(parts[1]); // hour of day, 0..23

        // Sum ALL values for this key; the original read only iterator.next(),
        // silently dropping every value after the first.
        long total = 0L;
        while (iterator.hasNext()) {
            total += iterator.next().get();
        }

        // Fold the total into the per-weekday hour slot and emit the row.
        // NOTE(review): state accumulates across reduce() calls on the same
        // reducer instance — presumably intentional, but verify against the job
        // design since Hadoop may run several reducers, each with its own table.
        ArrayList<Long> hours = hashtable.get(week);
        hours.set(hour, hours.get(hour) + total);
        outputCollector.collect(new Text(key), new Text(hours.toString()));
    }

    @Override
    public void close() throws IOException {
        // No resources to release.
    }

    @Override
    public void configure(JobConf jobConf) {
        // No job configuration needed.
    }
}
