package com.lxl.testHd.relation.one;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * @author e_lixilin
 * @date 2022/2/23 11:10
 * @description Common majors shared between two schools (两个学校的共同专业).
 */
public class CommonMajor {

    /**
     * Mapper: parses input lines of the form {@code school:majorA\tmajorB\t...}
     * and emits one (major, school) pair per major, so the shuffle groups all
     * schools offering the same major into a single reduce call.
     */
    static class SchoolCommonMajorsMapperOne extends Mapper<LongWritable, Text, Text, Text> {
        // Reused output holders — standard Hadoop idiom to avoid per-record allocation.
        private final Text school = new Text();
        private final Text major = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Example input line: 西1大学:软件	戏剧	英语	土木	天文学	数学	考古
            // limit=2 keeps any ':' inside the major list intact.
            String[] fields = value.toString().split(":", 2);
            if (fields.length < 2) {
                // Skip malformed lines instead of crashing the map task
                // with an ArrayIndexOutOfBoundsException.
                return;
            }
            school.set(fields[0]);
            for (String m : fields[1].split("\t")) {
                if (!m.isEmpty()) {
                    major.set(m);
                    context.write(major, school);
                }
            }
        }
    }

    /**
     * Reducer: receives (major, [schools...]) and emits every unordered pair
     * of schools sharing that major, keyed as {@code "A-B"} with A before B
     * lexicographically, valued with the shared major.
     */
    static class SchoolCommonMajorsReducerOne extends Reducer<Text, Text, Text, Text> {
        private final Text outKey = new Text();
        private final Text outVal = new Text();

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // Materialize the values first: Hadoop reuses the same Text object
            // across the iterator, so we must copy to Strings before sorting.
            List<String> schools = new ArrayList<>();
            for (Text school : values) {
                schools.add(school.toString());
            }
            // Sort so each pair is emitted deterministically as "smaller-larger".
            Collections.sort(schools);
            // Emit every unordered two-school combination as "A-B".
            for (int i = 0; i < schools.size() - 1; i++) {
                for (int j = i + 1; j < schools.size(); j++) {
                    outKey.set(schools.get(i) + "-" + schools.get(j));
                    outVal.set(key);
                    context.write(outKey, outVal);
                }
            }
        }
    }

    /**
     * Configures and submits the job. Expects {@code args[0]} = input path and
     * {@code args[1]} = output path; falls back to local development defaults
     * only when no paths are supplied.
     *
     * @param args input path and output path (optional; defaults used if absent)
     * @throws Exception if job setup or execution fails
     */
    public static void main(String[] args) throws Exception {
        // BUG FIX: the original unconditionally overwrote args, silently
        // ignoring any paths passed on the command line.
        if (args.length < 2) {
            args = new String[]{"D:\\big-data\\relation", "D:\\big-data\\relation\\output"};
        }
        Configuration conf = new Configuration();
        Job job1 = Job.getInstance(conf);

        // Lets MapReduce locate the jar containing this job's classes.
        job1.setJarByClass(CommonMajor.class);

        // Register the map and reduce implementations for this job.
        job1.setMapperClass(SchoolCommonMajorsMapperOne.class);
        job1.setReducerClass(SchoolCommonMajorsReducerOne.class);

        // Key/value types emitted by the mapper.
        job1.setMapOutputKeyClass(Text.class);
        job1.setMapOutputValueClass(Text.class);

        // Key/value types emitted by the reducer (final job output).
        job1.setOutputKeyClass(Text.class);
        job1.setOutputValueClass(Text.class);

        // Input file(s) to process on HDFS (or local FS in dev).
        FileInputFormat.setInputPaths(job1, new Path(args[0]));
        // Directory where the job writes its results; must not already exist.
        FileOutputFormat.setOutputPath(job1, new Path(args[1]));

        // Submit the job and block until completion; exit non-zero on failure.
        boolean succeeded = job1.waitForCompletion(true);
        System.exit(succeeded ? 0 : 1);
    }
}
