import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.ArrayList;


/**
 * Reducer that keeps only job postings with no recorded academic-degree
 * requirement: for each input bean whose {@code min_edu_level} field is the
 * placeholder literal {@code \N}, it emits the (key, bean) pair unchanged.
 */
public class NoAcDegreeReducer extends Reducer<Text, NoAcDegreeFlowBean, Text, NoAcDegreeFlowBean> {

    /** Placeholder value meaning "education level not recorded" (two chars: backslash + N). */
    private static final String NO_EDU_LEVEL = "\\N";

    /**
     * Filters the grouped beans in a single pass.
     *
     * <p>Previously every value was defensively copied into an {@code ArrayList}
     * before a second filtering loop. Because each matching bean is written
     * immediately — and {@code Context.write} serializes its arguments at call
     * time — Hadoop's reuse of the value object across iterations is harmless
     * here, so the whole group no longer needs to be buffered in memory.
     * The per-record debug {@code System.out.println} was also removed.
     *
     * @param key     grouping key produced by the mapper
     * @param values  all beans sharing {@code key} (object reused between iterations)
     * @param context output collector
     * @throws IOException          if the write fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<NoAcDegreeFlowBean> values,
                          Context context)
            throws IOException, InterruptedException {

        for (NoAcDegreeFlowBean bean : values) {
            // Constant-first equals avoids an NPE when min_edu_level is null.
            if (NO_EDU_LEVEL.equals(bean.getMin_edu_level())) {
                context.write(key, bean);
            }
        }
    }
}


