package com.atguigu.mapreduce.shiyan;

import com.atguigu.mapreduce.JobConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.*;

/**
 * MapReduce job that derives (grandchild, grandparent) pairs from a flat
 * child/parent table via a reduce-side self-join.
 *
 * <p>Input lines look like {@code "Tom Lucy"} (child, then parent, separated by
 * whitespace); a header line containing the word "child" is skipped. For each
 * input record the mapper emits the record twice, keyed on the join person:
 * once saying "this person has child X" and once saying "this person has parent
 * Y". The reducer then cross-joins the two sides for each person, which yields
 * exactly the grandchild/grandparent pairs.
 *
 * <p>This replaces an earlier map-side join that buffered all pairs in mapper
 * memory and joined in {@code cleanup()} — that approach drops any relationship
 * whose two records fall into different input splits.
 */
public class FamilyRelations {

    // Value tags distinguishing the two sides of the self-join in the reducer.
    private static final String CHILD_TAG = "C:";
    private static final String PARENT_TAG = "P:";

    /**
     * A {@link HashMap} whose {@link #put} appends to the existing value
     * (comma-separated) instead of replacing it when the key is already present.
     *
     * <p>Kept for backward compatibility with external callers; no longer used
     * by the job itself.
     *
     * @param <K> the key type; values are always {@link String}
     */
    public static class MyHashMap<K> extends HashMap<K, String> {
        /**
         * Inserts {@code value}, appending it as {@code "old,value"} if
         * {@code key} is already mapped.
         *
         * @param key   the map key
         * @param value the value to insert or append
         * @return the previous value for {@code key}, per the {@link HashMap} contract
         */
        @Override
        public String put(K key, String value) {
            String newV = value;
            if (containsKey(key)) {
                newV = get(key) + "," + newV;
            }
            return super.put(key, newV);
        }
    }

    /**
     * Emits each (child, parent) record twice so the reducer can join the two
     * generations on the shared person:
     * <ul>
     *   <li>key = parent, value = {@code "C:"+child} (parent's child)</li>
     *   <li>key = child,  value = {@code "P:"+parent} (child's parent)</li>
     * </ul>
     */
    public static class FamilyMapper extends Mapper<Object, Text, Text, Text> {
        // Reused across calls to avoid per-record allocations.
        private final Text outKey = new Text();
        private final Text outValue = new Text();

        @Override
        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString().trim();
            // Skip blank lines and the header row.
            if (line.isEmpty() || line.contains("child")) {
                return;
            }
            // Split on any run of whitespace (tabs or multiple spaces included).
            String[] childParent = line.split("\\s+");
            if (childParent.length < 2) {
                // Malformed record; ignore rather than crash the task.
                return;
            }
            String child = childParent[0];
            String parent = childParent[1];

            // Side 1: "parent" is the join person, who has child "child".
            outKey.set(parent);
            outValue.set(CHILD_TAG + child);
            context.write(outKey, outValue);

            // Side 2: "child" is the join person, who has parent "parent".
            outKey.set(child);
            outValue.set(PARENT_TAG + parent);
            context.write(outKey, outValue);
        }
    }

    /**
     * For each person, collects their children and their parents, then emits the
     * cross product: every (child-of-person, parent-of-person) pair is a
     * (grandchild, grandparent) pair.
     *
     * <p>NOTE: this reducer must NOT be used as a combiner — it writes a header
     * row per task and its join logic is not associative.
     */
    public static class FamilyReducer extends Reducer<Text, Text, Text, Text> {
        // Instance field (not static): one Reducer instance per task, so the
        // header is written exactly once per output file even under JVM reuse.
        private boolean headerWritten = false;
        private final Text outKey = new Text();
        private final Text outValue = new Text();

        @Override
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            List<String> children = new ArrayList<>();
            List<String> parents = new ArrayList<>();
            for (Text value : values) {
                String tagged = value.toString();
                if (tagged.startsWith(CHILD_TAG)) {
                    children.add(tagged.substring(CHILD_TAG.length()));
                } else if (tagged.startsWith(PARENT_TAG)) {
                    parents.add(tagged.substring(PARENT_TAG.length()));
                }
            }
            // A grandchild/grandparent pair exists only when this person has
            // both at least one child and at least one parent.
            if (children.isEmpty() || parents.isEmpty()) {
                return;
            }
            // Write the header once, before the first data row of this task.
            if (!headerWritten) {
                context.write(new Text("grandchild"), new Text("grandparent"));
                headerWritten = true;
            }
            for (String grandchild : children) {
                for (String grandparent : parents) {
                    outKey.set(grandchild);
                    outValue.set(grandparent);
                    context.write(outKey, outValue);
                }
            }
        }
    }

    /**
     * Configures and submits the job.
     *
     * @param args {@code args[0]} = input path, {@code args[1]} = output path
     * @throws Exception if job setup or execution fails
     */
    public static void main(String[] args) throws Exception {
        if (args.length < 2) {
            System.err.println("Usage: FamilyRelations <input path> <output path>");
            System.exit(2);
        }
        Job job = JobConfig.getJob(null, FamilyRelations.class,
                FamilyMapper.class, FamilyReducer.class, Text.class, Text.class);
        // Deliberately no combiner: FamilyReducer writes a header record and
        // performs a cross join, so running it map-side would corrupt the
        // shuffle data and duplicate the header.
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
