package com.chief.hadoop.yarn.tools;

import com.chief.hadoop.mr.join.OrderBean;
import com.chief.hadoop.yarn.OrderMapper;
import com.chief.hadoop.yarn.OrderReduce;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.viewfs.ViewFileSystem;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;

import java.net.URI;

public class OrderTool implements Tool {

    // Single source of truth for the job output directory: the pre-run delete
    // and FileOutputFormat.setOutputPath below must always refer to the same path.
    private static final String OUTPUT_DIR = "/data1/mapJoinOut";

    private Configuration configuration;

    @Override
    public Configuration getConf() {
        return configuration;
    }

    @Override
    public void setConf(Configuration conf) {
        this.configuration = conf;
    }

    /**
     * Configures and submits the map-join order job, blocking until completion.
     *
     * @param args {@code args[0]} is the input path of the order data
     * @return 0 if the job succeeds, 1 if it fails or the input path argument is missing
     * @throws Exception if filesystem access or job submission fails
     */
    @Override
    public int run(String[] args) throws Exception {
        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException
        // when the input path is missing.
        if (args == null || args.length < 1) {
            System.err.println("Usage: OrderTool <inputPath>");
            return 1;
        }

        // Job jar + cross-platform flag for remote submission from a Windows client.
        configuration.set("mapreduce.job.jar", "E:\\bigdata-start\\hadoop3\\hadoopCode\\target\\hadoopCode-1.0-SNAPSHOT.jar");
        configuration.set("mapreduce.app-submission.cross-platform", "true");

        configuration.set("mapreduce.job.queuename", "hive");
        configuration.set("mapreduce.job.priority", "5");

        // Enable map-side output compression.
        configuration.setBoolean("mapreduce.map.output.compress", true);
        // Use BZip2 as the map-side output codec.
        configuration.setClass("mapreduce.map.output.compress.codec", BZip2Codec.class, CompressionCodec.class);

        // Remove any previous run's output so the job does not fail on an existing
        // directory. ViewFileSystem.newInstance returns an uncached instance that we
        // own, so close it with try-with-resources instead of leaking it.
        try (FileSystem fileSystem = ViewFileSystem.newInstance(new URI("viewfs://mycluster/"), configuration)) {
            fileSystem.delete(new Path(OUTPUT_DIR), true);
        }

        Job job = Job.getInstance(configuration, "all");

        job.setJarByClass(OrderTool.class);

        job.setMapperClass(OrderMapper.class);
        job.setMapOutputKeyClass(OrderBean.class);
        job.setMapOutputValueClass(OrderBean.class);

        // Side file distributed to every mapper for the map-side join.
        job.addCacheFile(URI.create("/data1/goods.txt"));

        job.setReducerClass(OrderReduce.class);
        job.setOutputKeyClass(OrderBean.class);
        job.setOutputValueClass(NullWritable.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(OUTPUT_DIR));

        // Enable reduce-side (final) output compression with BZip2 as well.
        FileOutputFormat.setCompressOutput(job, true);
        FileOutputFormat.setOutputCompressorClass(job, BZip2Codec.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

}


