package com.mapreduce;

import com.bean.IdFlag;
import com.bean.YearTemp;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.net.URI;

public class JoinReduce extends Configured implements Tool {
    // Reduce-side join of two datasets keyed by a shared id.
    //
    // Sample records (two.txt):
    //   00001,201501,50
    //   00002,201501,100
    //
    // KeyValueTextInputFormat with separator "," splits each line at the
    // FIRST comma only, so the mappers receive:
    //   k1        v1
    //   00001     Moon Light,1973        (one.txt)
    //   00001     201501,50              (two.txt)

    /** Default HDFS locations, used when no CLI arguments are supplied. */
    private static final String DEFAULT_INPUT_ONE =
            "hdfs://192.168.10.11:9000/joindata/one.txt";
    private static final String DEFAULT_INPUT_TWO =
            "hdfs://192.168.10.11:9000/joindata/two.txt";
    private static final String DEFAULT_OUTPUT =
            "hdfs://192.168.10.11:9000/idoutput";

    /**
     * Mapper for the first (dimension) dataset. Tags each record with
     * flag 0 so it sorts ahead of matching fact records at the reducer:
     *   IdFlag(00001,0) -> "Moon Light,1973"
     */
    static class JoinOneMapper extends
            Mapper<Text, Text, IdFlag, Text> {
        @Override
        protected void map(Text key, Text value, Mapper<Text, Text, IdFlag, Text>.Context context) throws IOException, InterruptedException {
            context.write(new IdFlag(key.toString(), 0), value);
        }
    }

    /**
     * Mapper for the second (fact) dataset. Tags each record with flag 1:
     *   IdFlag(00001,1) -> "201501,50"
     */
    static class JoinTwoMapper extends
            Mapper<Text, Text, IdFlag, Text> {
        @Override
        protected void map(Text key, Text value, Mapper<Text, Text, IdFlag, Text>.Context context) throws IOException, InterruptedException {
            context.write(new IdFlag(key.toString(), 1), value);
        }
    }

    /**
     * Joins all values that share an id (grouping ignores the flag via
     * IdGroup) into a single comma-separated line keyed by the id.
     */
    static class JoinReducer extends
            Reducer<IdFlag, Text, Text, Text> {
        @Override
        protected void reduce(IdFlag key, Iterable<Text> values, Reducer<IdFlag, Text, Text, Text>.Context context) throws IOException, InterruptedException {
            // StringBuilder (unsynchronized) instead of StringBuffer; append
            // the delimiter before each element after the first, rather than
            // trimming a trailing comma with setLength(length - 1), which
            // would throw if values were ever empty.
            StringBuilder joined = new StringBuilder();
            for (Text val : values) {
                if (joined.length() > 0) {
                    joined.append(',');
                }
                joined.append(val.toString());
            }
            context.write(new Text(key.getId()), new Text(joined.toString()));
        }
    }

    /**
     * Configures and submits the join job.
     *
     * @param args optional: args[0] = first input path, args[1] = second
     *             input path, args[2] = output path; missing positions fall
     *             back to the historical hard-coded HDFS defaults, so
     *             existing invocations keep working unchanged
     * @return 0 on success, -1 on failure
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        // KeyValueTextInputFormat: split key from value at the first ",".
        conf.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", ",");

        // Input/output paths: CLI arguments win, defaults otherwise.
        Path input1 = new Path(args.length > 0 ? args[0] : DEFAULT_INPUT_ONE);
        Path input2 = new Path(args.length > 1 ? args[1] : DEFAULT_INPUT_TWO);
        Path output = new Path(args.length > 2 ? args[2] : DEFAULT_OUTPUT);

        // Resolve the filesystem from the output path itself instead of a
        // second hard-coded URI, so non-default schemes/authorities work too.
        FileSystem fs = output.getFileSystem(conf);
        if (fs.exists(output)) {
            fs.delete(output, true);
        }

        Job job = Job.getInstance(conf);
        job.setJobName("second");
        job.setJarByClass(this.getClass());

        // One mapper per input dataset, each tagging records with its flag.
        MultipleInputs.addInputPath(job, input1,
                KeyValueTextInputFormat.class,
                JoinOneMapper.class);
        MultipleInputs.addInputPath(job, input2,
                KeyValueTextInputFormat.class,
                JoinTwoMapper.class);
        job.setMapOutputKeyClass(IdFlag.class);
        job.setMapOutputValueClass(Text.class);

        job.setReducerClass(JoinReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setNumReduceTasks(2);
        // With 2 reducers, partition by id only (IdPar) so all records for
        // one id land in the same reducer, and group by id only (IdGroup)
        // so flag-0 and flag-1 records arrive in a single reduce() call.
        job.setPartitionerClass(IdPar.class);
        job.setGroupingComparatorClass(IdGroup.class);

        TextOutputFormat.setOutputPath(job, output);
        return job.waitForCompletion(true) ? 0 : -1;
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new JoinReduce(), args));
    }
}
