package com.etc;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;

/**
 * A	I,K,C,B,G,F,H,O,D,
 * B	A,F,J,E,
 * C	A,E,B,H,F,G,K,
 * D	G,C,K,A,L,F,E,H,
 * E	G,M,L,H,A,F,B,D,
 * F	L,M,D,C,G,A,
 * G	M,
 * H	O,
 * I	O,C,
 * J	O,
 * K	B,
 * L	D,E,
 * M	E,F,
 * O	A,H,I,J,F,
 */
/**
 * MapReduce job (step 1) for the "common friends" problem.
 *
 * <p>Input lines look like {@code "A\tI,K,C,B,..."} — a person, a tab, and a
 * comma-separated friend list. For every pair of friends (Fi, Fj) of a person
 * P, the mapper emits {@code "Fi-Fj" -> P}; the reducer then collects, per
 * pair key, every such P — i.e. all common friends of Fi and Fj.
 */
public class Friend1 {

    /**
     * Mapper: emits every pair of one person's friends, keyed "Fi-Fj",
     * with the person as the value. The friend list is sorted first so
     * each unordered pair gets one canonical key (always "A-B", never
     * "B-A"), letting pairs from different input lines group together.
     */
    public static class FriendMap extends Mapper<LongWritable, Text, Text, Text> {
        // Reusable Writable instances — avoids allocating one Text per output record.
        private final Text pairKey = new Text();
        private final Text owner = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split "person\tfriend,friend,..." into the person and the friend list.
            String[] parts = value.toString().split("\t");
            if (parts.length < 2) {
                // Guard: the original indexed parts[1] unconditionally and would
                // throw ArrayIndexOutOfBoundsException on a malformed/blank line.
                return;
            }
            String[] friends = parts[1].split(",");
            // Sort so each unordered friend pair maps to a single canonical key.
            Arrays.sort(friends);
            owner.set(parts[0]);
            // Emit every combination (i, j) with i < j; the last element has
            // no partner left, hence the "length - 1" bound on the outer loop.
            for (int i = 0; i < friends.length - 1; i++) {
                for (int j = i + 1; j < friends.length; j++) {
                    pairKey.set(friends[i] + "-" + friends[j]);
                    context.write(pairKey, owner);
                }
            }
        }
    }

    /**
     * Reducer: for each friend-pair key, concatenates (space-separated)
     * every person who lists both members of the pair — their common friends.
     */
    public static class FriendReduce extends Reducer<Text, Text, Text, Text> {
        private final Text result = new Text();

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // StringBuilder: no shared state here, so the synchronized
            // StringBuffer of the original was unnecessary overhead.
            StringBuilder sb = new StringBuilder();
            for (Text person : values) {
                if (sb.length() > 0) {
                    sb.append(' ');
                }
                sb.append(person.toString());
            }
            // The key is already a Text; no need to copy it into a new Text.
            result.set(sb.toString());
            context.write(key, result);
        }
    }

    /**
     * Configures and runs the job.
     *
     * @param args optional: {@code args[0]} = input path, {@code args[1]} =
     *             output path; defaults to the original hard-coded Windows
     *             paths when absent, so existing invocations still work.
     */
    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "common-friends-step1");
        job.setJarByClass(Friend1.class);
        // Mapper / reducer implementations.
        job.setMapperClass(FriendMap.class);
        job.setReducerClass(FriendReduce.class);
        // Map output types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        // Reduce (final) output types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        String inputPath = args.length > 0 ? args[0] : "F:\\output";
        String outputPath = args.length > 1 ? args[1] : "F:\\out";

        // Hadoop refuses to start if the output directory already exists,
        // so remove a stale one from a previous run.
        File outDir = new File(outputPath);
        if (outDir.exists()) {
            FileUtils.deleteDirectory(outDir);
        }
        FileInputFormat.setInputPaths(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        // Propagate job success/failure as the process exit code; the
        // original discarded waitForCompletion's return value.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

