package com.zyh.demo5;

import com.zyh.entity.PlayWritable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;

/**
 * Driver for the "sort live streams" MapReduce job.
 *
 * <p>Reads whitespace-separated records from {@code /mr/demo1/sortlive.txt},
 * maps each line to a {@link PlayWritable} key (viewer count + play length)
 * so Hadoop's shuffle sorts by it, then the reducer emits
 * {@code name -> viewer-length} pairs to {@code /mr/out/demo5/}.
 */
public class SortLiveJob extends Configured implements Tool {
    @Override
    public int run(String[] strings) throws Exception {
        // Assemble the job.
        //String input = "/mr/demo1/user-pay-2021-9-9.log";
        String input = "/mr/demo1/sortlive.txt";
        String output = "/mr/out/demo5/";

        // 1. Configuration: start from the one ToolRunner parsed (-D options,
        // classpath config files) instead of discarding it with a fresh instance.
        Configuration conf = getConf();
        conf.set("fs.defaultFS","hdfs://192.168.193.10:9000");

        // 2. Create the job.
        Job job = Job.getInstance(conf);
        job.setJarByClass(SortLiveJob.class);

        // 3. Input path.
        FileInputFormat.setInputPaths(job,new Path(input));

        // 4. Mapper: key is PlayWritable so the framework sorts by it.
        job.setMapperClass(SortLiveMapper.class);
        job.setMapOutputKeyClass(PlayWritable.class);
        job.setMapOutputValueClass(Text.class);
        // 5. Reducer.
        job.setReducerClass(SortLiveReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        // 6. Output path (must not exist when the job starts).
        FileOutputFormat.setOutputPath(job,new Path(output));
        // Delete the output path if it already exists; reuse a single
        // FileSystem handle instead of fetching it twice.
        FileSystem fs = FileSystem.get(conf);
        Path outputPath = new Path(output);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true); // recursive delete
        }

        // 7. Run the job and block until completion.
        boolean b = job.waitForCompletion(true);
        return b ? 0 : 1; // 0 on success, 1 on failure
    }

    public static void main(String[] args) throws Exception{
        // Propagate the job's exit code to the shell (previously discarded).
        System.exit(ToolRunner.run(new SortLiveJob(), args));
    }
}
/**
 * Maps each input line to (PlayWritable(viewer, length), name).
 *
 * <p>Expected record layout (whitespace-separated): field 0 = stream name,
 * field 2 = play length, field 3 = viewer count — TODO confirm against the
 * actual input file. Emitting PlayWritable as the key makes the shuffle
 * phase sort records by its compareTo order.
 */
class SortLiveMapper extends Mapper<LongWritable,Text,PlayWritable,Text>{
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String[] split = value.toString().split("\\s+");
        // Guard: skip blank or malformed lines instead of throwing
        // ArrayIndexOutOfBoundsException and killing the task.
        if (split.length < 4) {
            return;
        }
        String name = split[0];
        String timeStr = split[2];
        String viewerStr = split[3];
        try {
            context.write(new PlayWritable(Integer.parseInt(viewerStr),Integer.parseInt(timeStr)),new Text(name));
        } catch (NumberFormatException ignored) {
            // Non-numeric fields (e.g. a header row) — skip the record.
        }
    }
}
/**
 * Emits one (name, "viewer-length") pair per value grouped under a key.
 *
 * <p>Swaps key and value relative to the map output: the stream name becomes
 * the output key, and the viewer/length statistics become the output value.
 */
class SortLiveReducer extends Reducer<PlayWritable,Text,Text,Text>{
    @Override
    protected void reduce(PlayWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        // The stats string depends only on the key, so build it once per group.
        Text stats = new Text(key.getViewer() + "-" + key.getLength());
        for (Text name : values) {
            context.write(name, stats);
        }
    }
}