package com.doitedu.mr.day05.movie;

import com.alibaba.fastjson.JSON;
import com.doitedu.mr.day04.bean.MovieWriable;
import com.doitedu.mr.day05.WordCountMapper;
import com.doitedu.mr.day05.WordCountReducer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;

import java.io.IOException;
import java.util.*;

/**
 * @Date 2021/12/6
 * @Created by HANGGE
 * @Description Find the N movies with the most ratings (top-N by comment count).
 * 求评论次数最多的前n部电影
 *
 */
/**
 * MapReduce job: rank movies by number of ratings and emit the top N.
 *
 * <p>Input: one JSON record per line, parsed into {@link MovieWriable}.
 * Map emits (movieId, 1); reduce sums per-movie counts, then in cleanup()
 * sorts all counts seen by this reduce task and writes the N largest.
 * N is read from configuration key {@code cnt.topn} (default 3).
 *
 * <p>NOTE(review): the global top-N is only exact with a single reduce task
 * (the driver leaves the default of 1); with multiple reducers each task
 * emits its own local top-N.
 */
public class CntTopN {

    /** Parses each JSON line into a MovieWriable and emits (movieId, 1). */
    static class CntTopNMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        // Reused output objects to avoid a per-record allocation (standard MR idiom).
        private final Text k = new Text();
        private final IntWritable v = new IntWritable(1);

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            try {
                MovieWriable mw = JSON.parseObject(value.toString(), MovieWriable.class);
                k.set(mw.getMovie());
                context.write(k, v);
            } catch (Exception e) {
                // Skip malformed lines, but surface them in the job counters instead of
                // burying a stack trace in the task log.
                context.getCounter("CntTopN", "MALFORMED_LINES").increment(1);
            }
        }
    }

    /**
     * Accumulates per-movie rating counts across all reduce() calls, then emits
     * the top N in cleanup() once every key has been seen.
     */
    static class CntTopNReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        // All movies seen by this task, mapped to their total rating count.
        private final Map<Text, IntWritable> counts = new HashMap<>();
        // How many movies to emit; configurable, defaults to the original hard-coded 3.
        private int topN = 3;

        @Override
        protected void setup(Context context) {
            topN = context.getConfiguration().getInt("cnt.topn", 3);
        }

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int cnt = 0;
            // Sum the values rather than count iterations, so results stay correct
            // if a combiner ever pre-aggregates partial counts upstream.
            for (IntWritable value : values) {
                cnt += value.get();
            }
            // Hadoop reuses the key object between reduce() calls, so store a copy.
            counts.put(new Text(key), new IntWritable(cnt));
        }

        /**
         * Runs once after all reduce() calls: sort the accumulated counts in
         * descending order and write the top N entries.
         */
        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            List<Map.Entry<Text, IntWritable>> ranked = new ArrayList<>(counts.entrySet());
            ranked.sort(Map.Entry.comparingByValue(Comparator.reverseOrder()));
            for (int i = 0; i < Math.min(topN, ranked.size()); i++) {
                Map.Entry<Text, IntWritable> entry = ranked.get(i);
                context.write(entry.getKey(), entry.getValue());
            }
        }
    }

    /** Configures and submits the job; exits non-zero if the job fails. */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "cnt_topn");
        // Required on a real cluster so Hadoop can locate and ship the job jar.
        job.setJarByClass(CntTopN.class);
        job.setMapperClass(CntTopNMapper.class);
        job.setReducerClass(CntTopNReducer.class);
        // Map output types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // Reduce (final) output types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // NOTE: top-N is only globally exact with the default single reduce task.
        FileInputFormat.setInputPaths(job, new Path("E:\\mrdata\\movie\\input"));
        FileOutputFormat.setOutputPath(job, new Path("E:\\mrdata\\movie\\cnt_topn4"));
        // Propagate job success/failure to the caller instead of always exiting 0.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
