package com.example.demo.hadoop.mapreduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
 * MapReduce job that finds, per gender, the "star" with the highest hot index.
 *
 * <p>Input (GBK-encoded text, e.g. weibo2.txt), one record per line:
 * {@code name \t gender \t hotIndex}. The mapper keys records by gender, a
 * custom partitioner routes male/female to separate reducers, and the
 * combiner/reducer keep only the record with the maximum hot index.
 */
public class SearchStar extends Configured implements Tool {

    // Gender marker for male records.
    private static final String MALE = "male";
    // Gender marker for female records.
    private static final String FEMALE = "female";
    // Field separator used in the input files (tab).
    private static final String TAB_SEPARATOR = "\t";

    /**
     * Mapper: parses one GBK-encoded line and emits (gender, name \t hotIndex).
     * Lines with fewer than three tab-separated fields are silently skipped.
     */
    public static class SearchStartMapper extends Mapper<LongWritable, Text, Text, Text> {
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // The input files are GBK-encoded, so decode the raw bytes
            // explicitly instead of relying on Text's UTF-8 toString().
            String line = new String(value.getBytes(), 0, value.getLength(), "GBK");
            String[] split = line.split(TAB_SEPARATOR);
            if (split.length >= 3) {
                String gender = split[1].trim();
                String content = split[0] + TAB_SEPARATOR + split[2];
                context.write(new Text(gender), new Text(content));
            }
        }
    }

    /**
     * Partitioner: routes records by gender so each reducer handles one gender.
     * male -> partition 0, female -> partition 1, unknown -> partition 0.
     */
    public static class SearchStarPartitioner extends Partitioner<Text, Text> {
        private static final Map<String, Integer> GENDER_PARTITIONS = new HashMap<>();

        static {
            GENDER_PARTITIONS.put(MALE, 0);
            GENDER_PARTITIONS.put(FEMALE, 1);
        }

        @Override
        public int getPartition(Text key, Text value, int num) {
            if (num == 0) {
                return 0;
            }
            Integer partition = GENDER_PARTITIONS.get(key.toString());
            // Modulo guards against an index >= num when the job is run with
            // fewer reducers than genders (e.g. a single reducer).
            return partition == null ? 0 : partition % num;
        }
    }

    /**
     * Scans values of the form {@code name \t hotIndex} and returns
     * {@code [name, maxHotIndex]} for the entry with the largest hot index,
     * or {@code null} when no well-formed value was seen.
     */
    private static String[] findHottest(Iterable<Text> values) {
        int maxHotIndex = Integer.MIN_VALUE;
        String name = null;
        for (Text value : values) {
            String[] split = value.toString().split(TAB_SEPARATOR);
            if (split.length >= 2) {
                int hotIndex = Integer.parseInt(split[1]);
                if (name == null || hotIndex > maxHotIndex) {
                    name = split[0];
                    maxHotIndex = hotIndex;
                }
            }
        }
        return name == null ? null : new String[]{name, String.valueOf(maxHotIndex)};
    }

    /**
     * Combiner: local reduce pass that keeps only the hottest record per
     * gender key, cutting shuffle traffic.
     *
     * <p>Bug fix: a combiner must preserve the map-output key — the original
     * emitted the star <em>name</em> as the key, which broke the gender-based
     * grouping the reducer relies on. It also emitted a bogus
     * {@code ("", MIN_VALUE)} record when every value was malformed; now it
     * simply emits nothing in that case.
     */
    public static class SearchStarCombiner extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            String[] hottest = findHottest(values);
            if (hottest != null) {
                // Keep the gender key so downstream grouping is unchanged.
                context.write(key, new Text(hottest[0] + TAB_SEPARATOR + hottest[1]));
            }
        }
    }

    /**
     * Reducer: for each gender, writes the hottest star as
     * {@code name \t name \t maxHotIndex} (final output format).
     */
    public static class SearchStarReduce extends Reducer<Text, Text, Text, Text> {

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            String[] hottest = findHottest(values);
            if (hottest != null) {
                context.write(new Text(hottest[0]), new Text(hottest[0] + TAB_SEPARATOR + hottest[1]));
            }
        }
    }

    /**
     * Configures and submits the job.
     *
     * @param args args[0] = input path, args[1] = output path
     * @return 0 on success, 1 on job failure, 2 on bad usage
     */
    @Override
    public int run(String[] args) throws Exception {
        if (args.length < 2) {
            System.err.println("Usage: SearchStar <input path> <output path>");
            return 2;
        }
        // Bug fix: use the ToolRunner-populated configuration (getConf())
        // instead of a fresh Configuration, so generic -D options passed on
        // the command line are honoured.
        Configuration conf = getConf() != null ? getConf() : new Configuration();
        Path outPath = new Path(args[1]);
        FileSystem fs = outPath.getFileSystem(conf);
        // Remove a stale output directory so the job can be re-run.
        if (fs.exists(outPath)) {
            fs.delete(outPath, true);
        }
        Job job = Job.getInstance(conf, "searchStar");
        job.setJarByClass(SearchStar.class);
        // Two reducers: one per gender partition.
        job.setNumReduceTasks(2);
        job.setMapperClass(SearchStartMapper.class);
        job.setPartitionerClass(SearchStarPartitioner.class);
        job.setCombinerClass(SearchStarCombiner.class);
        job.setReducerClass(SearchStarReduce.class);
        // Map and reduce share the same output types, so set them once.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, outPath);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        // Bug fix: forward the real command-line arguments instead of a
        // hard-coded empty array, which made run() fail on args[1] with an
        // ArrayIndexOutOfBoundsException on every invocation.
        int exitCode = ToolRunner.run(new Configuration(), new SearchStar(), args);
        System.exit(exitCode);
    }
}
