package com.zyh.mapreduce.demo4;


import com.zyh.entity.MyIntWritable;
import com.zyh.entity.PlayWritable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;
import java.util.TreeMap;

public class SortLiveJob extends Configured implements Tool {

    /**
     * Assembles and submits the MapReduce job that reports the top-3
     * streamers by viewer count.
     *
     * @param strings command-line arguments (currently unused; paths are hard-coded)
     * @return 0 if the job succeeded, 1 if it failed
     * @throws Exception if job configuration or submission fails
     */
    @Override
    public int run(String[] strings) throws Exception {
        //String input = "/mr/demo1/user-pay-2021-9-9.log";
        String input = "/mr/demo1/sortlive.txt";
        String output = "/mr/out/demo4/";

        // 1. Configuration: use the conf injected by ToolRunner (the Tool
        //    contract) so command-line -D overrides are honored; it still
        //    auto-loads the classpath configuration files.
        Configuration conf = getConf();
        conf.set("fs.defaultFS", "hdfs://192.168.193.10:9000");

        // 2. Create the job.
        Job job = Job.getInstance(conf);
        job.setJarByClass(SortLiveJob.class);

        // 3. Input path.
        FileInputFormat.setInputPaths(job, new Path(input));

        // 4. Mapper: emits (viewerCount, streamerName).
        job.setMapperClass(LiveMapper.class);
        job.setMapOutputKeyClass(MyIntWritable.class);
        job.setMapOutputValueClass(Text.class);

        // 5. Reducer: inverts to (streamerName, viewerCount).
        job.setReducerClass(LiveReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // 6. Output path must not already exist, so delete any stale run
        //    first (single FileSystem handle instead of two lookups).
        Path outPath = new Path(output);
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(outPath)) {
            fs.delete(outPath, true);
        }
        FileOutputFormat.setOutputPath(job, outPath);

        // 7. Submit and block until completion.
        return job.waitForCompletion(true) ? 0 : 1; // 0 = success, 1 = failure
    }

    public static void main(String[] args) throws Exception {
        // Propagate the job's exit code to the shell; the previous version
        // discarded it, so callers could never detect a failed run.
        System.exit(ToolRunner.run(new SortLiveJob(), args));
    }

    /**
     * Maintains a per-mapper top-3 of streamers keyed by viewer count.
     *
     * NOTE(review): top-3 eviction via lastKey() assumes MyIntWritable
     * sorts in descending order — confirm its compareTo implementation.
     * NOTE(review): two streamers with the same viewer count collide on
     * the TreeMap key and the earlier one is silently overwritten —
     * confirm ties are acceptable to drop.
     */
    private static class LiveMapper extends Mapper<LongWritable, Text, MyIntWritable, Text> {
        // viewer count -> streamer name, bounded to at most 3 entries
        private final TreeMap<MyIntWritable, String> map = new TreeMap<>();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Fields are separated by a double space: name at [0], viewers at [3].
            // NOTE(review): a short/malformed line throws AIOOBE and fails the
            // task — consider guarding s.length before indexing.
            String[] s = value.toString().split("  ");
            String name = s[0];
            String viewer = s[3];
            map.put(new MyIntWritable(Integer.parseInt(viewer)), name);
            // Once a fourth entry arrives, evict the one at the sorted tail.
            if (map.size() > 3) {
                map.remove(map.lastKey());
            }
        }

        /**
         * Runs once after the map() loop finishes; flushes the retained top-3.
         * Exceptions now propagate to the framework (failing the task) instead
         * of being swallowed by printStackTrace, which silently lost records.
         *
         * @param context output channel for the retained (count, name) pairs
         * @throws IOException          if the write fails
         * @throws InterruptedException if the task is interrupted
         */
        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            for (MyIntWritable viewers : map.keySet()) {
                context.write(viewers, new Text(map.get(viewers)));
            }
        }
    }

    /**
     * Inverts each (viewerCount, streamerName) pair to (name, count) for the
     * final output. The unused running counter from the original was removed.
     */
    private static class LiveReducer extends Reducer<MyIntWritable, Text, Text, IntWritable> {
        @Override
        protected void reduce(MyIntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            for (Text value : values) {
                context.write(value, new IntWritable(key.getValue()));
            }
        }
    }
}

