package org.example.hadoop3;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;

/**
 * Reduce-side join: for each join key, buffers all "order" records and the single
 * goods record, then enriches every order with the goods' product name (pname)
 * and emits it. Values tagged with title "order" are order records; any other
 * title is treated as the goods/product record for this key.
 */
public class ReduceJoinReducer extends Reducer<Text, GoodsOrderBean, GoodsOrderBean, NullWritable> {

    @Override
    protected void reduce(Text key, Iterable<GoodsOrderBean> values, Reducer<Text, GoodsOrderBean, GoodsOrderBean, NullWritable>.Context context) throws IOException, InterruptedException {
        // Buffers are local to each reduce() call: reducer-level fields would leak
        // the previous key's pname into a group that has no goods record.
        List<GoodsOrderBean> orders = new ArrayList<>();
        GoodsOrderBean goodsBean = new GoodsOrderBean();

        for (GoodsOrderBean value : values) {
            // Constant-first equals avoids an NPE if a record has no title set.
            if ("order".equals(value.getTitle())) {
                // Hadoop reuses the value instance across the iterator, so each
                // buffered order must be a deep copy of the current value.
                GoodsOrderBean bean = new GoodsOrderBean();
                try {
                    BeanUtils.copyProperties(bean, value);
                } catch (IllegalAccessException | InvocationTargetException e) {
                    // Fail the task instead of silently emitting a half-copied bean.
                    throw new IOException("Failed to copy order record for key " + key, e);
                }
                orders.add(bean);
            } else {
                try {
                    BeanUtils.copyProperties(goodsBean, value);
                } catch (IllegalAccessException | InvocationTargetException e) {
                    throw new IOException("Failed to copy goods record for key " + key, e);
                }
            }
        }

        // Join only AFTER all values are consumed: MapReduce gives no ordering
        // guarantee within a key group, so the goods record may arrive after some
        // (or all) order records. Emitting inside the loop above would write
        // orders with an empty or stale pname.
        for (GoodsOrderBean order : orders) {
            order.setPname(goodsBean.getPname());
            context.write(order, NullWritable.get());
        }
    }
}
