package com.zha;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;

public class Friends {

    /**
     * Map stage.
     *
     * <p>Each input line is expected to be {@code "pair\tperson"} (e.g.
     * {@code "B-C\tA"}), i.e. the tab-separated output of an upstream job.
     * Emits the pair as the key and the person as the value so the reducer
     * can collect all common friends of each pair.
     */
    public static class FriendsMap extends Mapper<LongWritable, Text, Text, Text> {
        // Reused across map() calls to avoid per-record allocation (standard Hadoop idiom).
        private final Text outKey = new Text();
        private final Text outValue = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split on a tab: upstream output is key\tvalue. Splitting on a
            // space can fail depending on how the previous job was encoded.
            String[] fields = value.toString().split("\t");
            // Skip blank or malformed lines instead of throwing
            // ArrayIndexOutOfBoundsException and failing the whole task.
            if (fields.length < 2) {
                return;
            }
            outKey.set(fields[0]);
            outValue.set(fields[1]);
            context.write(outKey, outValue);
        }
    }

    /**
     * Reduce stage.
     *
     * <p>Joins every common friend of a pair with commas, producing e.g.
     * {@code "A-B\tC,E"}. Uses {@link StringBuilder} (no synchronization
     * needed in a single reduce call) and emits no trailing comma.
     */
    public static class FriendsReduce extends Reducer<Text, Text, Text, Text> {
        // Reused across reduce() calls to avoid per-group allocation.
        private final Text result = new Text();

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            StringBuilder joined = new StringBuilder();
            for (Text value : values) {
                // Separator only BETWEEN elements — the old version left a
                // dangling comma at the end of every line.
                if (joined.length() > 0) {
                    joined.append(',');
                }
                joined.append(value);
            }
            result.set(joined.toString());
            context.write(key, result);
        }
    }

    /**
     * Driver: configures and launches the common-friends job.
     *
     * <p>Expected reducer output (with the small sample data set):
     * {@code A-A\tC}, {@code A-B\tC}, {@code B-B\tA}, {@code B-C\tA}, ...
     *
     * @param args unused; input/output paths are hard-coded below
     */
    public static void main(String[] args) {
        Configuration configuration = new Configuration();
        try {
            // Job.getInstance(...) replaces the deprecated new Job(conf) constructor.
            Job job = Job.getInstance(configuration, "common-friends");
            // Jar containing the mapper/reducer classes.
            job.setJarByClass(Friends.class);
            job.setMapperClass(FriendsMap.class);
            job.setReducerClass(FriendsReduce.class);

            // Intermediate (map-side) output types.
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(Text.class);
            // Final (reduce-side) output types — previously left unset.
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);

            // The job fails if the output directory already exists, so remove
            // it up front. NOTE(review): java.io.File only works for the local
            // filesystem — use org.apache.hadoop.fs.FileSystem for HDFS paths.
            File outputDir = new File("d:/file/outputt");
            if (outputDir.exists()) {
                FileUtil.fullyDelete(outputDir);
            }

            // Input: output of the previous job; Output: final result directory.
            FileInputFormat.addInputPath(job, new Path("d:/file/output"));
            FileOutputFormat.setOutputPath(job, new Path("d:/file/outputt"));
            // A single reducer so all pairs land in one output file.
            job.setNumReduceTasks(1);

            // Exit with 0 on success, 1 on failure.
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
