package com.shujia.mr.sortPartition;

import com.shujia.mr.partitioner.PartitionerDriver;
import com.shujia.mr.partitioner.PartitionerMapper;
import com.shujia.mr.partitioner.PartitionerReducer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class SortPartitionDriver {

    /**
     * Routes each record to a reduce partition based on the age field embedded
     * in the map-output value.
     *
     * <p>Declared as a public static nested class (not an anonymous local class)
     * because Hadoop creates the partitioner reflectively through a no-arg
     * constructor; passing {@code new Partitioner<>(){...}.getClass()} fails at
     * runtime since an anonymous local class cannot be instantiated that way.
     *
     * <p>Partition layout (requires numReduceTasks == 5):
     * 0 = unparsable/missing age, 1 = age 21, 2 = age 22, 3 = age 23,
     * 4 = any other positive age.
     */
    public static class AgePartitioner extends Partitioner<Text, Text> {
        @Override
        public int getPartition(Text key, Text value, int numPartitions) {
            // Expected value layout (comma-separated), e.g.:
            // 1500100008	符半双,22,女,理科六班,363  -> fields[1] is the age
            String[] fields = value.toString().split(",");
            int age = 0;
            if (fields.length >= 5) {
                try {
                    age = Integer.parseInt(fields[1]);
                } catch (Exception e) {
                    // Malformed age falls through to partition 0 (age <= 0).
                    System.out.println("age年龄解析错误，捕获异常：" + e.getMessage());
                    age = -1;
                }
            }

            if (age > 0) {
                if (age == 21) {
                    return 1;
                } else if (age == 22) {
                    return 2;
                } else if (age == 23) {
                    return 3;
                } else {
                    return 4;
                }
            } else {
                return 0;
            }
        }
    }

    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        // Job.getInstance(conf) replaces the deprecated new Job(conf) constructor.
        Job job = Job.getInstance(conf);
        job.setJobName("idea-sortPartitioner");
        job.setJarByClass(SortPartitionDriver.class);

        // 2. Tell the Job which Mapper and Reducer classes to use.
        job.setMapperClass(SortPartitionMapper.class);
        job.setReducerClass(SortPartitionReducer.class);

        // 3. Declare the map-output types and the final (reduce) output types.
        //    Using the wrong Text class (e.g. javax.xml.soap.Text) causes a
        //    ClassCastException at runtime — make sure org.apache.hadoop.io.Text
        //    is imported.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(Student.class);

        // Must match the partition count produced by AgePartitioner (0..4).
        job.setNumReduceTasks(5);

        // Custom partitioner: a named static class that Hadoop can instantiate
        // reflectively (an anonymous class's Class object would fail here).
        job.setPartitionerClass(AgePartitioner.class);

        // 4. Input and output locations for the job.
        FileInputFormat.addInputPath(job, new Path("hadoopCode/output/reduceJoin/part-r-00000")); // student base info
        FileOutputFormat.setOutputPath(job, new Path("hadoopCode/output/sortPartitioner")); // join result

        // 5. Submit the job and propagate success/failure in the process exit
        //    code — "Process finished with exit code 0" alone does not prove
        //    the job succeeded unless we forward waitForCompletion's result.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
