package com.devilvan.customPartition;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Reducer that formats each key's flow record as a single tab-separated line
 * and emits it as the output key (with a {@link NullWritable} value).
 *
 * <p>NOTE(review): the reduce loop overwrites every field on each iteration,
 * so only the LAST {@link FlowEntity} seen for a key contributes to the output.
 * If aggregation (e.g. summing pack/payload counters) was intended, confirm
 * with the job author — this class preserves the last-value-wins behavior.
 */
public class CustomPartitionReducer extends Reducer<Text, FlowEntity, Text, NullWritable> {
    // Reusable output key, allocated once per task in setup() to avoid
    // per-record object churn (standard Hadoop Writable-reuse idiom).
    private Text text;

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        text = new Text();
    }

    /**
     * Builds a tab-separated record from the values for {@code key} and writes it.
     *
     * @param key     grouping key produced by the mapper/partitioner
     * @param values  flow entities for this key; only the last one is kept (see class note)
     * @param context Hadoop task context used to emit the formatted record
     */
    @Override
    protected void reduce(Text key, Iterable<FlowEntity> values, Context context)
            throws IOException, InterruptedException {
        // Primitive longs (not boxed Long) — avoids pointless autoboxing per record.
        long reportTime = 0L;
        String msiSdn = null;
        String apMac = null;
        String acMac = null;
        String host = null;
        String siteType = null;
        long upPackNum = 0L;
        long downPackNum = 0L;
        long upPayLoad = 0L;
        long downPayLoad = 0L;
        String httpStatus = null;
        for (FlowEntity value : values) {
            reportTime = value.getReportTime();
            msiSdn = value.getMsiSdn();
            apMac = value.getApMac();
            acMac = value.getAcMac();
            host = value.getHost();
            siteType = value.getSiteType();
            upPackNum = value.getUpPackNum();
            downPackNum = value.getDownPackNum();
            upPayLoad = value.getUpPayLoad();
            downPayLoad = value.getDownPayLoad();
            httpStatus = value.getHttpStatus();
        }
        text.set(reportTime + "\t" +
                msiSdn + "\t" +
                apMac + "\t" +
                acMac + "\t" +
                host + "\t" +
                siteType + "\t" +
                upPackNum + "\t" +
                downPackNum + "\t" +
                upPayLoad + "\t" +
                downPayLoad + "\t" +
                httpStatus);
        // Bug fix: the formatted record was built into `text` but never emitted —
        // the original wrote the raw input `key`, discarding the composed line.
        context.write(text, NullWritable.get());
    }
}
