package com.jml.mapreduce.文件压缩;

import com.jml.mapreduce.wordcount.WordcountMapper;
import com.jml.mapreduce.wordcount.WordcountReducer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;

import java.io.IOException;

/**
 * After building, upload the jar to the server and run:
 * yarn jar mapxxx.jar  com.jml.mapreduce.文件压缩.WordcountDriver  &lt;HDFS input&gt;  &lt;HDFS output&gt;
 */
public class WordcountDriver {

    /**
     * Input path. Local default for testing; overridden by args[0].
     */
    public static String inPath = "D:\\jml-downtemp\\Downloads\\README.txt";
    /**
     * Output directory. Overridden by args[1].
     */
    public static String outPath = "/myout";

    /**
     * Configures and submits the word-count job with compression enabled for
     * both the shuffle (map output) and the final job output.
     *
     * @param args optional: args[0] = HDFS input path, args[1] = HDFS output path
     * @throws IOException            if job submission fails
     * @throws ClassNotFoundException if a job class cannot be resolved
     * @throws InterruptedException   if the wait for completion is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // BUGFIX: the original read args[1] whenever args.length > 0, which
        // threw ArrayIndexOutOfBoundsException when exactly one arg was given.
        if (args.length >= 1) {
            inPath = args[0];
        }
        if (args.length >= 2) {
            outPath = args[1];
        }
        // 1. Build the job configuration.
        Configuration configuration = new Configuration();

        // Compress intermediate map output (shuffle stage).
        configuration.setBoolean("mapreduce.map.output.compress", true);
        // Alternative: BZip2 for the shuffle stage.
        //configuration.setClass("mapreduce.map.output.compress.codec", BZip2Codec.class, CompressionCodec.class);
        // Alternative: Gzip for the shuffle stage.
        //configuration.setClass("mapreduce.map.output.compress.codec", GzipCodec.class, CompressionCodec.class);
        // Snappy for the shuffle stage (fast; good fit for intermediate data).
        configuration.setClass("mapreduce.map.output.compress.codec", SnappyCodec.class, CompressionCodec.class);

        // Compress the final (reduce) output with Snappy as well.
        configuration.setBoolean("mapreduce.output.fileoutputformat.compress", true);
        configuration.setClass("mapreduce.output.fileoutputformat.compress.codec", SnappyCodec.class, CompressionCodec.class);


        Job job = Job.getInstance(configuration);
        // 2. Jar containing this driver (lets the cluster locate the classes).
        job.setJarByClass(WordcountDriver.class);

        // 3. Mapper and reducer classes.
        job.setMapperClass(WordcountMapper.class);
        job.setReducerClass(WordcountReducer.class);
        // 4. Map output key/value types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // 5. Final output key/value types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // 6. Input and output paths.
        FileInputFormat.setInputPaths(job, new Path(inPath));
        FileOutputFormat.setOutputPath(job, new Path(outPath));
        // Optional: number of reduce tasks.
        //job.setNumReduceTasks(10);
        // 7. Submit and wait for completion.
        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }

    /**
     * Compresses {@code xxx.txt} with BZip2, writing the result next to it
     * with the codec's default extension appended (e.g. {@code xxx.txt.bz2}).
     *
     * @throws Exception if the file cannot be read or the output written
     */
    public void compress() throws Exception {
        // File to compress.
        String file = "xxx.txt";
        // Codec (compression format) to use.
        Class<BZip2Codec> codecClass = BZip2Codec.class;
        Configuration configuration = new Configuration();
        // Instantiate the codec via reflection so it picks up the configuration.
        BZip2Codec codec = ReflectionUtils.newInstance(codecClass, configuration);
        FileSystem fileSystem = FileSystem.get(configuration);
        // BUGFIX: try-with-resources guarantees both streams are closed even
        // when copyBytes throws (the original leaked them on failure).
        // Closing the CompressionOutputStream also closes the wrapped
        // FSDataOutputStream it was created over.
        try (FSDataInputStream fis = fileSystem.open(new Path(file));
             CompressionOutputStream cos = codec.createOutputStream(
                     fileSystem.create(new Path(file + codec.getDefaultExtension())))) {
            IOUtils.copyBytes(fis, cos, 1024);
        }
    }

    /**
     * Decompresses {@code xxx.txt.bz2}, inferring the codec from the file
     * extension and writing the output with the extension stripped.
     *
     * @throws IOException if no codec matches the file extension or I/O fails
     * @throws Exception   if any other failure occurs
     */
    public void deCompress() throws Exception {
        String file = "xxx.txt.bz2";
        Configuration configuration = new Configuration();
        // Factory resolves the codec from the file extension.
        CompressionCodecFactory codecFactory = new CompressionCodecFactory(configuration);
        CompressionCodec codec = codecFactory.getCodec(new Path(file));
        // BUGFIX: getCodec returns null for an unrecognized extension; the
        // original then failed later with an opaque NullPointerException.
        if (codec == null) {
            throw new IOException("No compression codec found for file: " + file);
        }
        FileSystem fileSystem = FileSystem.get(configuration);
        // Output name: input name with the codec's extension removed.
        String outputFile = file.substring(0, file.length() - codec.getDefaultExtension().length());
        // BUGFIX: try-with-resources closes both streams on all paths
        // (the original leaked them if copyBytes threw).
        try (CompressionInputStream cis = codec.createInputStream(fileSystem.open(new Path(file)));
             FSDataOutputStream fos = fileSystem.create(new Path(outputFile))) {
            IOUtils.copyBytes(cis, fos, 1024);
        }
    }
}
