import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.ArrayList;


public class DegreeReducer extends Reducer<Text, DegreeFlowBean, Text, DegreeFlowBean> {

    /**
     * Passes every {@link DegreeFlowBean} for the given key straight through
     * to the output, one record per input value.
     *
     * @param key     the grouping key produced by the mapper
     * @param values  all beans that share {@code key}; Hadoop reuses a single
     *                bean instance across iterations of this iterable
     * @param context output sink; {@code context.write} serializes the record
     *                immediately
     * @throws IOException          if the framework fails to write a record
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void reduce(Text key, Iterable<DegreeFlowBean> values,
                          Context context)
            throws IOException, InterruptedException {

        // Hadoop's value-object reuse only matters when beans are buffered
        // past the current iteration. Writing each value immediately lets the
        // framework serialize it before the next iteration overwrites the
        // shared instance, so no defensive copy or intermediate list is
        // needed. (The previous version copied every bean into an ArrayList
        // and printed each one to stdout — O(n) extra memory plus debug noise
        // in the task logs, with an identical output stream.)
        for (DegreeFlowBean value : values) {
            context.write(key, value);
        }
    }
}


