package yeliuhuishi.Join.ReduceJoin;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import yeliuhuishi.File.DelAllFile;
import yeliuhuishi.Join.Info;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Reduce端join
 * map统计两张表，然后把数据给reduce处理。
 * <p>
 * 1、思路:
 * 通过将关联条件作为map输出的key，将两表满足join条件的数据并携带数据所来源的文件信息，发往同一个
 * reduce task，在reduce中进行数据串联。
 * 2、优缺点：
 * join的操作是在reduce阶段完成，reduce端的处理压力太大，map节点的运算负荷则很低，
 * 资源利用率不高，而且reduce阶段极易产生数据倾斜。
 */
public class ReduceJoin {

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        String input = "src/main/java/yeliuhuishi/Join/ReduceJoin/input";
        String output = "src/main/java/yeliuhuishi/Join/ReduceJoin/output";
        // Clear any previous output so the job does not fail on an existing output directory.
        DelAllFile.delAllFile(new File(output));

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, ReduceJoin.class.getName());

        job.setJarByClass(ReduceJoin.class);
        job.setMapperClass(MyMapJoin.class);
        job.setReducerClass(MyReduceJoin.class);

        // Map output: join key (product id) -> tagged record from either table.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Info.class);

        // Final output: the joined record as key, no value.
        job.setOutputKeyClass(Info.class);
        job.setOutputValueClass(NullWritable.class);

        FileInputFormat.setInputPaths(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));

        System.out.println(job.waitForCompletion(true) ? "成功" : "失败");
    }

    /**
     * Emits every input row keyed by product id and tagged with its source table:
     * flag "1" = product_info.txt rows (pId,pName,price,produceArea),
     * flag "0" = click-log rows (uId,pId,dateStr,clickArea).
     * Column layout is inferred from the index usage below — confirm against the
     * actual input files.
     */
    public static class MyMapJoin extends Mapper<LongWritable, Text, Text, Info> {
        // Reused across map() calls (MapReduce object-reuse convention); every
        // field is reassigned on each call so no stale data is ever serialized.
        Info info = new Info();
        Text pidKey = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String[] split = value.toString().split(",");

            // The file name of the current split tells us which table this row is from.
            FileSplit inputSplit = (FileSplit) context.getInputSplit();
            String fileName = inputSplit.getPath().getName();

            if ("product_info.txt".equals(fileName)) {
                pidKey.set(split[0]);
                info.setpId(split[0]);
                info.setpName(split[1]);
                info.setPrice(Float.parseFloat(split[2]));
                info.setProduceArea(split[3]);
                // BUG FIX: reset the click-side fields. `info` is reused across
                // calls, so otherwise values from a previously-mapped click row
                // would be carried along and serialized with this product row.
                info.setuId("");
                info.setDateStr("");
                info.setClickArea("");
                info.setFlag("1");
            } else {
                pidKey.set(split[1]);
                info.setpId(split[1]);
                info.setuId(split[0]);
                info.setDateStr(split[2]);
                info.setClickArea(split[3]);
                // BUG FIX: reset the product-side fields for the same reason.
                info.setpName("");
                info.setPrice(0f);
                info.setProduceArea("");
                info.setFlag("0");
            }

            context.write(pidKey, info);
        }
    }

    /**
     * Joins the single product record for each key with every click record for
     * that key, emitting one combined row per click (inner-join semantics).
     */
    public static class MyReduceJoin extends Reducer<Text, Info, Info, NullWritable> {
        // Combined output record, reused for every emitted row.
        Info info = new Info();
        // Click rows buffered for the CURRENT key only; cleared on every reduce() call.
        List<Info> list = new ArrayList<Info>();

        @Override
        protected void reduce(Text key, Iterable<Info> values, Context context) throws IOException, InterruptedException {
            // BUG FIX: `list` is an instance field and reduce() runs once per key
            // on the same Reducer instance; without this clear, clicks from
            // earlier keys leaked into every later key's output.
            list.clear();
            boolean hasProduct = false;

            for (Info value : values) {
                if ("1".equals(value.getFlag())) { // product_info side
                    info.setpId(value.getpId());
                    info.setpName(value.getpName());
                    info.setPrice(value.getPrice());
                    info.setProduceArea(value.getProduceArea());
                    hasProduct = true;
                } else {                           // page_click_log side
                    // Deep-copy the record: Hadoop reuses the `value` object
                    // while iterating, so storing it directly would alias.
                    Info info1 = new Info();
                    info1.setuId(value.getuId());
                    info1.setpId(value.getpId());
                    info1.setDateStr(value.getDateStr());
                    info1.setClickArea(value.getClickArea());
                    list.add(info1);
                }
            }

            // BUG FIX: skip keys with no product row; previously `info` could
            // still hold the product data of a prior key and was emitted anyway.
            if (!hasProduct) {
                return;
            }

            // BUG FIX: emit one joined row PER click. The original assigned the
            // click fields inside this loop but called context.write() only once
            // after it, silently dropping all but the last click per product.
            for (Info info1 : list) {
                info.setuId(info1.getuId());
                info.setpId(info1.getpId());
                info.setDateStr(info1.getDateStr());
                info.setClickArea(info1.getClickArea());
                context.write(info, NullWritable.get());
            }
        }
    }
}




