package com.shujia.mr.tuijian;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/*
    map input:
    <0L, tom hello hadoop cat>
    map output:
    <tom-hello, 1>
    <tom-hadoop, 1>
    <tom-cat, 1>
    <hello-hadoop, -1>
    <hello-cat, -1>
    <hadoop-cat, -1>

    hello tom world hive mr
    <hello-tom, 1>
    ...
 */
/**
 * Mapper for the friend-relationship analysis.
 *
 * <p>Each input line is "owner friend1 friend2 ...": the first token is a
 * person, the remaining tokens are that person's direct friends. For every
 * pair of names on the line a canonical key "a-b" (lexicographically ordered)
 * is emitted with value:
 * <ul>
 *   <li>{@code 1}  — the pair contains the owner, i.e. a direct friendship;</li>
 *   <li>{@code -1} — both names are friends of the owner, i.e. a potential
 *       indirect friendship through a common friend.</li>
 * </ul>
 */
class PersonMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

    // Reused output objects — standard Hadoop idiom; context.write()
    // serializes immediately, so mutating them per record is safe.
    private final Text outKey = new Text();
    private final LongWritable outValue = new LongWritable();

    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, LongWritable>.Context context) throws IOException, InterruptedException {
        // value - e.g. "tom hello hadoop cat"
        String line = value.toString().trim();
        if (line.isEmpty()) {
            // Skip blank lines — split() would otherwise yield a single
            // empty token and produce no (or bogus) pairs.
            return;
        }
        // \s+ tolerates tabs and runs of spaces; split(" ") would produce
        // empty tokens there and corrupt the pair keys.
        String[] persons = line.split("\\s+");
        for (int i = 0; i < persons.length - 1; i++) {
            for (int j = i + 1; j < persons.length; j++) {
                String guanXi = biJiao(persons[i], persons[j]);
                outKey.set(guanXi);
                if (i == 0) {
                    // Pair includes the line's owner: direct friendship.
                    outValue.set(1L);
                } else {
                    // Both names are friends of the owner: indirect relation.
                    outValue.set(-1L);
                }
                context.write(outKey, outValue);
            }
        }
    }

    /**
     * Builds a canonical pair key so "tom hello" and "hello tom" collapse
     * to the same "hello-tom" key in the shuffle.
     *
     * @param s1 first name
     * @param s2 second name
     * @return the two names joined with "-" in lexicographic order
     */
    public String biJiao(String s1, String s2) {
        if (s1.compareTo(s2) < 0) {
            return s1 + "-" + s2; // e.g. hello-tom
        } else {
            return s2 + "-" + s1; // e.g. hello-tom
        }
    }
}

//<hello-tom, [1,1,1,1,-1,-1,1]>
/**
 * Reducer for the friend-relationship analysis.
 *
 * <p>For each canonical pair key (e.g. "hello-tom") the values are a mix of
 * {@code 1} (direct friendship observed) and {@code -1} (shared a common
 * friend). A single {@code 1} is enough to classify the pair as direct
 * friends; otherwise they are only indirect friends.
 */
class PersonReducer extends Reducer<Text, LongWritable, Text, Text> {
    @Override
    protected void reduce(Text key, Iterable<LongWritable> values, Reducer<Text, LongWritable, Text, Text>.Context context) throws IOException, InterruptedException {
        boolean direct = false;
        // Scan until the first direct-friendship marker; no need to consume
        // the rest of the values once one is found.
        for (LongWritable value : values) {
            if (value.get() == 1L) {
                direct = true;
                break;
            }
        }
        Text label = direct ? new Text("直接好友") : new Text("间接好友");
        context.write(key, label);
    }
}


/**
 * Driver for the friend-relationship analysis MapReduce job.
 *
 * <p>Usage: {@code PersonDemo <input path> <output path>} — both paths are
 * on HDFS. The process exit code reflects the job outcome (0 on success).
 */
public class PersonDemo {
    public static void main(String[] args) throws Exception {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException when arguments are missing.
        if (args.length < 2) {
            System.err.println("Usage: PersonDemo <input path> <output path>");
            System.exit(2);
        }

        // Hadoop environment configuration object.
        Configuration conf = new Configuration();

        // Point the default filesystem at the HDFS namenode.
        conf.set("fs.defaultFS", "hdfs://master:9000");

        // Create the job.
        Job job = Job.getInstance(conf);

        job.setJarByClass(PersonDemo.class);

        // Human-readable job name shown in the YARN UI.
        job.setJobName("32期 好友关系分析案例mapreduce实现");

        // Mapper class for this job.
        job.setMapperClass(PersonMapper.class);
        // Reducer class for this job.
        job.setReducerClass(PersonReducer.class);

        // Map output key/value types — must match PersonMapper's generics.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);

        // Final (reduce) output key/value types — must match PersonReducer's generics.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // Input path on HDFS.
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        // Output path on HDFS (must not already exist).
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Submit to YARN and block until completion.
        boolean b = job.waitForCompletion(true);
        if (b) {
            System.out.println("32期 好友关系分析案例mapreduce实现执行成功！>_-");
        } else {
            System.out.println("32期 好友关系分析案例mapreduce实现执行失败！T_T");
        }
        // Propagate the job result through the process exit code so shell
        // scripts / schedulers can detect failure.
        System.exit(b ? 0 : 1);
    }
}
