package com.study.hadoop.mapreduce.reduceJoin;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;

/**
 * @Author huang
 * @Date 2022/1/14 13:22
 * @Version 1.0
 */
public class TableReducer extends Reducer<Text, TableBean, TableBean, NullWritable> {

    /**
     * Performs the reduce side of the join: for one pid key, collects all
     * "order" records and the single "pd" (product) record, then enriches each
     * order with the product name and emits it.
     *
     * @param key     the join key (pid)
     * @param values  mixed order and product beans sharing this pid
     * @param context Hadoop context used to emit joined records
     * @throws IOException          on emit failure or if a bean copy fails
     * @throws InterruptedException if the framework interrupts the write
     */
    @Override
    protected void reduce(Text key, Iterable<TableBean> values, Reducer<Text, TableBean, TableBean, NullWritable>.Context context) throws IOException, InterruptedException {
        // Example input grouped under one key (pid):
        //   01  1001  1   order
        //   01  1004  4   order
        //   01  Xiaomi    pd
        List<TableBean> orderBeans = new ArrayList<>();
        TableBean pdBean = new TableBean();

        for (TableBean value : values) {
            // Hadoop's values iterator reuses a single TableBean instance and
            // rewrites its fields on each step, so we must deep-copy every
            // element we keep instead of storing the reference.
            // Constant-first equals also guards against a null flag.
            if ("order".equals(value.getFlag())) {
                TableBean orderBean = new TableBean();
                try {
                    BeanUtils.copyProperties(orderBean, value);
                } catch (IllegalAccessException | InvocationTargetException e) {
                    // Rethrow instead of swallowing: a failed copy would
                    // otherwise silently drop join results.
                    throw new IOException("Failed to copy order bean for key " + key, e);
                }
                orderBeans.add(orderBean);
            } else {
                try {
                    BeanUtils.copyProperties(pdBean, value);
                } catch (IllegalAccessException | InvocationTargetException e) {
                    throw new IOException("Failed to copy product bean for key " + key, e);
                }
            }
        }

        // Enrich each buffered order with the product name and emit it.
        for (TableBean orderBean : orderBeans) {
            orderBean.setPname(pdBean.getPname());
            context.write(orderBean, NullWritable.get());
        }
    }
}
