import Utils.MapUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

/**
 * @author legolas
 * @date 2020/3/14 6:11 PM
 * Computes the top 10 anchors by total streaming duration per day.
 * The map stage emits (Text, LongWritable) pairs of (anchor id, duration).
 * The reduce stage aggregates durations per anchor, then sorts the totals
 * descending in cleanup() and emits only the top 10 as (Text, LongWritable).
 */
public class VideoInfoTop10Job {

    public static class Top10Map extends Mapper<LongWritable, Text, Text, LongWritable> {

        // Reused across map() calls — standard Hadoop idiom to avoid allocating
        // a new Writable object for every input record.
        private final Text outKey = new Text();
        private final LongWritable outValue = new LongWritable();

        /**
         * Emits (anchor id, streaming duration) for each cleaned input line.
         *
         * Expected input: one tab-separated record per line, where field 0 is
         * the anchor id and field 4 is the streaming duration in whole units.
         * Malformed lines (too few fields, non-numeric duration) are skipped
         * instead of failing the entire task.
         */
        @Override
        protected void map(LongWritable k1, Text v1, Context context) throws IOException, InterruptedException {
            String line = v1.toString();
            String[] fields = line.split("\t");
            if (fields.length < 5) {
                // Not enough columns — skip rather than throw ArrayIndexOutOfBoundsException.
                return;
            }
            try {
                long length = Long.parseLong(fields[4]);
                outKey.set(fields[0]);
                outValue.set(length);
                context.write(outKey, outValue);
            } catch (NumberFormatException ignored) {
                // Duration field is not a valid long — skip this record.
            }
        }
    }


    public static class Top10Reduce extends Reducer<Text, LongWritable, Text, LongWritable> {

        // Accumulates each anchor's total streaming duration across all
        // reduce() calls; consumed and sorted once in cleanup().
        private final Map<String, Long> totalLengthByAnchor = new HashMap<String, Long>();

        /**
         * Sums all durations for one anchor key. The per-anchor total is held
         * in memory until cleanup() performs the global descending sort.
         */
        @Override
        protected void reduce(Text k2, Iterable<LongWritable> v2s, Context context) throws IOException, InterruptedException {
            long lengthSum = 0;
            for (LongWritable v2 : v2s) {
                lengthSum += v2.get();
            }
            totalLengthByAnchor.put(k2.toString(), lengthSum);
        }

        /**
         * Runs once after all reduce() calls: sorts anchors by total duration
         * descending (via MapUtils.sortValueDesc) and writes the top 10 as
         * key "anchorId\t&lt;time&gt;" with the total duration as the value.
         */
        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            // "time" is the date string the driver stored in the job configuration.
            String time = context.getConfiguration().get("time");

            Map<String, Long> sorted = MapUtils.sortValueDesc(totalLengthByAnchor);

            // Reuse the output writables across iterations.
            Text k3 = new Text();
            LongWritable v3 = new LongWritable();
            int emitted = 0;
            for (Map.Entry<String, Long> entry : sorted.entrySet()) {
                if (emitted >= 10) {
                    break;
                }
                k3.set(entry.getKey() + "\t" + time);
                v3.set(entry.getValue());
                context.write(k3, v3);
                emitted++;
            }
        }
    }


    /**
     * Job driver: configures mapper/reducer classes, input/output paths, and
     * submits the job.
     *
     * Exits with status 0 on success and a non-zero status on failure so that
     * external schedulers can detect failed runs (the original ignored the
     * boolean returned by waitForCompletion and always exited 0).
     *
     * @param args args[0] = input path, args[1] = output path
     */
    public static void main(String[] args) {
        try {
            if (args.length != 2) {
                // Missing paths — report and exit immediately.
                System.out.print("请指定输入路径和输出路径！");
                System.exit(100);
            }
            Configuration conf = new Configuration();
            // Parameters the reducer needs (e.g. the report date) are passed
            // through the job configuration and read back via the context.
            conf.set("time", "time from args");

            Job job = Job.getInstance(conf);

            // Required so the cluster can locate this class inside the job jar.
            job.setJarByClass(VideoInfoTop10Job.class);

            FileInputFormat.setInputPaths(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));

            // Map stage wiring.
            job.setMapperClass(Top10Map.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(LongWritable.class);

            // Reduce stage wiring.
            job.setReducerClass(Top10Reduce.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(LongWritable.class);

            // Propagate the job result as the process exit code.
            System.exit(job.waitForCompletion(true) ? 0 : 1);

        } catch (Exception e) {
            e.printStackTrace();
            // Signal failure to the caller instead of exiting 0.
            System.exit(1);
        }
    }


}
