package com.zha;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;

public class HaoYou {

    //  Map 阶段
    // Map stage: each input line looks like "user:friend1,friend2,...".
    // For every friend listed, emit (friend, user), so the reducer receives,
    // per person, the full set of users who list that person as a friend.
    public static class HaoYouMap extends Mapper<LongWritable, Text, Text, Text> {
        // Reused output writables to avoid allocating per record.
        private final Text outKey = new Text();
        private final Text outValue = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split "user" from the comma-separated friend list.
            String[] parts = value.toString().split(":");
            outValue.set(parts[0]);
            for (String friend : parts[1].split(",")) {
                outKey.set(friend);
                context.write(outKey, outValue);
            }
        }
    }



    //Reduce阶段
    // Reduce stage: "friend" is one person; "users" are all users who listed
    // that person as a friend. Every unordered pair of those users therefore
    // shares "friend" in common, so emit ("userA-userB", friend) per pair.
    public static class HaoYouReduce extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text friend, Iterable<Text> users, Context context)
                throws IOException, InterruptedException {
            // Copy the values out: Hadoop reuses the same Text instance while
            // iterating, so storing references directly would be wrong.
            ArrayList<String> userList = new ArrayList<String>();
            for (Text user : users) {
                userList.add(user.toString());
            }
            // Sort so each pair is emitted in one canonical "A-B" order.
            Collections.sort(userList);
            // FIX: j starts at i + 1. The original started at j = 0, which
            // emitted self-pairs ("B-B") and reversed duplicates ("C-B" in
            // addition to "B-C").
            for (int i = 0; i < userList.size() - 1; i++) {
                for (int j = i + 1; j < userList.size(); j++) {
                    context.write(new Text(userList.get(i) + "-" + userList.get(j)), friend);
                }
            }
        }
    }

    /**
     * Job driver: wires the mapper/reducer, clears the local output directory
     * if it exists, and runs the job over the hard-coded local paths
     * {@code d:/file/input} -> {@code d:/file/output}.
     *
     * <p>Each output line is a pair of users joined by "-" followed by a
     * friend they have in common, e.g. {@code B-C\tA} means users B and C
     * both list A as a friend.
     *
     * @param args unused; input and output paths are hard-coded
     */
    public static void main(String[] args) {
        Configuration configuration = new Configuration();
        try {
            // FIX: Job.getInstance(conf) replaces the deprecated new Job();
            // the original constructed the Configuration but never passed it
            // to the job, so any settings on it were silently ignored.
            Job job = Job.getInstance(configuration, "common-friends");
            // Jar containing the job classes.
            job.setJarByClass(HaoYou.class);
            // Mapper and reducer implementations.
            job.setMapperClass(HaoYouMap.class);
            job.setReducerClass(HaoYouReduce.class);

            // FIX: declare all four output types. The original only set the
            // map output key and the (final) output value, leaving the map
            // output value and final output key defaulted.
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(Text.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);

            // Delete a pre-existing output directory, since the job fails if
            // the output path already exists.
            File file = new File("d:/file/output");
            if (file.exists()) {
                FileUtil.fullyDelete(file);
            }

            // Input data directory.
            FileInputFormat.addInputPath(job, new Path("d:/file/input"));
            // Output data directory.
            FileOutputFormat.setOutputPath(job, new Path("d:/file/output"));
            // Single reducer keeps all pairs in one sorted output file.
            job.setNumReduceTasks(1);
            // Block until completion; exit 0 on success, 1 on failure.
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
