package mxx.mr.job5;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * Multiple mappers feeding a single reducer.
 *
 * <p>Several input sources are each handled by their own mapper class; a
 * single reducer stage produces one combined output.
 *
 * <p>Test: the reducer receives records from both mappers and emits lines
 * of the form "1,a,aa,aaa".
 */
public class MRJob5 {
    /**
     * Parses two-field CSV lines ("key,value") and emits (key, value).
     */
    public static class Mapper1 extends Mapper<LongWritable, Text, Text, Text> {
        // Reused across records to avoid one allocation pair per input line.
        private final Text outKey = new Text();
        private final Text outValue = new Text();

        /**
         * @param key     byte offset of the line in the input file (unused)
         * @param value   one raw input line, expected as "key,value"
         * @param context sink for the (key, value) pair
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Split once instead of twice per record.
            String[] fields = value.toString().split(",");
            if (fields.length < 2) {
                // Malformed line: skip instead of throwing ArrayIndexOutOfBoundsException.
                return;
            }
            outKey.set(fields[0]);
            outValue.set(fields[1]);
            context.write(outKey, outValue);
        }
    }

    /**
     * Parses three-field CSV lines ("key,v1,v2") and emits (key, "v1,v2").
     */
    public static class Mapper2 extends Mapper<LongWritable, Text, Text, Text> {
        // Reused across records to avoid one allocation pair per input line.
        private final Text outKey = new Text();
        private final Text outValue = new Text();

        /**
         * @param key     byte offset of the line in the input file (unused)
         * @param value   one raw input line, expected as "key,v1,v2"
         * @param context sink for the (key, "v1,v2") pair
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Split once instead of three times per record.
            String[] fields = value.toString().split(",");
            if (fields.length < 3) {
                // Malformed line: skip instead of throwing ArrayIndexOutOfBoundsException.
                return;
            }
            outKey.set(fields[0]);
            outValue.set(fields[1] + "," + fields[2]);
            context.write(outKey, outValue);
        }
    }

    /**
     * Joins all values for a key (arriving from both mappers) into a single
     * comma-separated string, producing output lines like "1,a,aa,aaa" once
     * combined with the "," key/value separator set on the job.
     */
    public static class Reducer1 extends Reducer<Text, Text, Text, Text> {
        /**
         * @param key     grouping key shared by both map outputs
         * @param values  all map-side values for this key
         * @param context sink for the (key, joined-values) pair
         */
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // Delimiter-first join: no trailing-comma trim, and no
            // StringIndexOutOfBoundsException if the iterable is ever empty
            // (the original substring(0, length-1) would throw in that case).
            StringBuilder joined = new StringBuilder();
            String sep = "";
            for (Text value : values) {
                joined.append(sep).append(value.toString());
                sep = ",";
            }
            context.write(key, new Text(joined.toString()));
        }
    }

    /**
     * Job driver: wires two input directories to their respective mappers,
     * funnels both into one reducer, and writes a single output directory.
     *
     * @param args args[0] = input path for Mapper1 (e.g. data/job5/input1/),
     *             args[1] = input path for Mapper2 (e.g. data/job5/input2/),
     *             args[2] = output path (e.g. data/job5/output/)
     */
    public static void main(String[] args) throws Exception {

        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 3) {
            System.err.println("Usage: MRJob5 <input path 1> <input path 2> <output path>");
            System.exit(2);
        }

        // config
        Configuration configuration = new Configuration();
        // Current (Hadoop 2+) property name for the TextOutputFormat key/value
        // separator; the old "mapred.*" key is kept for older clusters.
        configuration.set("mapreduce.output.textoutputformat.separator", ",");
        configuration.set("mapred.textoutputformat.separator", ",");

        // Delete any pre-existing output directory so the job can be re-run.
        FileSystem fileSystem = FileSystem.get(configuration);
        Path outputPath = new Path(args[2]);
        if (fileSystem.exists(outputPath)) {
            fileSystem.delete(outputPath, true);
        }
        Path inputPath1 = new Path(args[0]);
        Path inputPath2 = new Path(args[1]);

        // job
        Job job = Job.getInstance(configuration);
        job.setJobName("MRJob5Multi");
        job.setJarByClass(MRJob5.class);

        // Each input directory gets its own mapper class.
        MultipleInputs.addInputPath(job, inputPath1, TextInputFormat.class, Mapper1.class);
        MultipleInputs.addInputPath(job, inputPath2, TextInputFormat.class, Mapper2.class);

        // Single reducer stage; map output types match the job output types
        // (Text/Text), so setOutputKeyClass/setOutputValueClass cover both.
        job.setReducerClass(Reducer1.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        // One output directory for the whole job.
        FileOutputFormat.setOutputPath(job, outputPath);

        // Block until completion and propagate job success/failure as the
        // process exit code (the original always exited 0, even on failure).
        boolean success = job.waitForCompletion(true);
        System.out.println("---完成---");
        System.exit(success ? 0 : 1);
    }

}
