package org.example.hadoop.service.priceScopeFrequency;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * @author wwy
 * @date 2020-11-10 21:05
 */
/**
 * Buckets price keys (sorted ascending by the shuffle) into fixed-width ranges
 * of {@value #STEP} and emits one {@code "lo~hi" -> count} record per bucket,
 * plus a final {@code ">100" -> count} record for all keys above 100.
 *
 * <p>State is kept per-instance (never {@code static}): a reducer instance may
 * share a JVM with others (JVM reuse, local runner), and static state would
 * leak counts across tasks.
 *
 * <p>End-of-input output is emitted from {@link #cleanup}, the framework's
 * designated hook — calling {@code context.nextKey()} inside {@code reduce()}
 * (as a "last key" probe) advances the framework's key iterator and silently
 * drops the following key.
 */
public class PriceScopeFrequencyReducer extends Reducer<IntWritable, IntWritable, Text, IntWritable> {
    /** Width of each price bucket. */
    private static final int STEP = 10;
    /** Keys strictly greater than this go into the overflow (">100") bucket. */
    private static final int MAX_BUCKETED = 100;

    /** Inclusive lower bound of the bucket currently being accumulated. */
    private int startNum = 0;
    /** Inclusive upper bound of the bucket currently being accumulated. */
    private int endNum = STEP;
    /** Running count for the current bucket. */
    private int countNum = 0;
    /** Running count of all keys above {@link #MAX_BUCKETED}. */
    private int overflowCount = 0;

    /**
     * k2       v2
     * 20     1,1,1
     *
     * k3      v3
     * 0~10     3
     *
     * <p>Relies on keys arriving in ascending order (guaranteed by the MapReduce
     * sort phase). When a key jumps past the current bucket, every intervening
     * bucket is flushed — including empty ones — so counts never land in the
     * wrong range.
     *
     * @param key     price value (sorted ascending across calls)
     * @param values  per-record counts to sum for this price
     * @param context output collector
     * @throws IOException          on write failure
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(IntWritable key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }

        int keyInt = key.get();
        if (keyInt > MAX_BUCKETED) {
            // Accumulate only; the ">100" record is written once, in cleanup().
            overflowCount += sum;
            return;
        }

        // Flush completed buckets until the key fits the current range. The
        // original single-step advance mis-bucketed keys that skipped a range
        // (e.g. 5 then 35 put 35 into "10~20").
        while (keyInt > endNum) {
            context.write(new Text(startNum + "~" + endNum), new IntWritable(countNum));
            countNum = 0;
            startNum += STEP;
            endNum += STEP;
        }
        countNum += sum;
    }

    /**
     * Emits the final in-progress bucket and the overflow total. Without this,
     * the last bucket's count would be lost when no larger key follows it.
     *
     * @param context output collector
     * @throws IOException          on write failure
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        context.write(new Text(startNum + "~" + endNum), new IntWritable(countNum));
        context.write(new Text(">100"), new IntWritable(overflowCount));
    }
}
