package com.klun.hdfs.hd.reducer;

import com.klun.hdfs.hd.pojo.TableBean;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;

/**
 * Reduce-side join: for each product-id key, combines the single product
 * record (flag != "0") with every order record (flag == "0") by copying the
 * product name onto each order bean, then emits the enriched order beans.
 */
public class TableReducer extends Reducer<Text, TableBean, TableBean, NullWritable> {

    /**
     * Joins order records with their matching product record for one key.
     *
     * @param key     the join key (product id) — not read directly
     * @param values  mixed order/product beans sharing this key; Hadoop reuses
     *                the same object across iterations, so each element must be
     *                deep-copied before being retained
     * @param context output sink for the joined beans
     * @throws IOException if a bean copy fails (a silent failure would emit
     *                     rows with missing or stale fields)
     */
    @Override
    protected void reduce(Text key, Iterable<TableBean> values, Context context)
            throws IOException, InterruptedException {

        ArrayList<TableBean> orderBeans = new ArrayList<>();
        TableBean pdBean = new TableBean();

        for (TableBean bean : values) {
            if ("0".equals(bean.getFlag())) { // order table record
                // Copy is mandatory: Hadoop reuses 'bean' on each iteration,
                // so storing it directly would leave the list full of
                // references to one mutating object.
                TableBean orderBean = new TableBean();
                copy(orderBean, bean);
                orderBeans.add(orderBean);
            } else { // product table record (expected at most once per key)
                copy(pdBean, bean);
            }
        }

        // Enrich every order with the product name and emit it.
        for (TableBean bean : orderBeans) {
            bean.setPname(pdBean.getPname());
            context.write(bean, NullWritable.get());
        }
    }

    /**
     * Copies all properties from {@code src} into {@code dest}, rethrowing
     * failures instead of swallowing them so bad rows are never emitted.
     */
    private static void copy(TableBean dest, TableBean src) throws IOException {
        try {
            BeanUtils.copyProperties(dest, src);
        } catch (IllegalAccessException | InvocationTargetException e) {
            throw new IOException("Failed to copy TableBean properties", e);
        }
    }
}
