package com.mapreduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.join.CompositeInputFormat;
import org.apache.hadoop.mapreduce.lib.join.TupleWritable;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.net.URI;

/**
 * Map-side inner join of two large, pre-sorted HDFS datasets using
 * {@link CompositeInputFormat}. The join itself happens in the input format;
 * the mapper only flattens each joined tuple into a comma-separated line.
 */
public class JoinMapReduce extends Configured implements Tool {

    /**
     * Flattens the {@link TupleWritable} emitted by {@code CompositeInputFormat}
     * into a single comma-separated {@link Text} value, keyed by the join key.
     */
    static class JoinMapper extends
            Mapper<Text, TupleWritable, Text, Text> {

        // Reused across map() calls to avoid allocating a Text per record.
        private final Text outValue = new Text();

        @Override
        protected void map(Text key, TupleWritable value, Mapper<Text, TupleWritable, Text, Text>.Context context) throws IOException, InterruptedException {
            // StringBuilder: no synchronization needed in a single mapper thread.
            StringBuilder joined = new StringBuilder();
            for (Writable val : value) {
                // Separator-first join avoids the original setLength(length-1)
                // trick, which threw StringIndexOutOfBoundsException on an
                // empty tuple.
                if (joined.length() > 0) {
                    joined.append(',');
                }
                joined.append(val.toString());
            }
            outValue.set(joined.toString());
            context.write(key, outValue);
        }
    }

    /**
     * Configures and submits the join job.
     *
     * @param args unused; input/output locations are currently hard-coded
     * @return 0 on success, -1 on job failure
     * @throws Exception on HDFS or job-submission errors
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        // KeyValueTextInputFormat splits each input line into key/value at the
        // first comma.
        conf.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", ",");

        // The two datasets to join. A CompositeInputFormat map-side join
        // requires that:
        //  - both datasets are large (the distributed-cache approach does not apply);
        //  - both are sorted by the same join key;
        //  - both have the same number of partitions, all records for a given
        //    key live in the same partition, and the files are not splittable.
        Path input1 = new Path(
                "hdfs://192.168.10.11:9000/joinone");
        Path input2 = new Path(
                "hdfs://192.168.10.11:9000/jointwo");

        // Build an inner-join expression over the two inputs; the library
        // constant replaces the hand-typed "mapreduce.join.expr" key.
        String expr = CompositeInputFormat.compose("inner",
                KeyValueTextInputFormat.class, input1, input2);
        conf.set(CompositeInputFormat.JOIN_EXPR, expr);

        Path output = new Path(
                "hdfs://192.168.10.11:9000/join");

        // Delete a stale output directory so the job can be re-run.
        FileSystem fs = FileSystem.get(
                new URI("hdfs://192.168.10.11:9000"), conf);
        if (fs.exists(output)) {
            fs.delete(output, true);
        }

        // Build the job.
        Job job = Job.getInstance(conf, "join");
        job.setJarByClass(this.getClass());

        job.setMapperClass(JoinMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        // Map-only job: the join is already done on the map side.
        job.setNumReduceTasks(0);

        job.setInputFormatClass(CompositeInputFormat.class);

        TextOutputFormat.setOutputPath(job, output);

        return job.waitForCompletion(true) ? 0 : -1;
    }

    /** Entry point: delegates to {@link ToolRunner} so -D options are honored. */
    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new JoinMapReduce(), args));
    }
}
