package com.rrd.dw.mr.gzip;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.rrd.dw.utils.HadoopUtils;
import com.rrd.dw.utils.MD5Util;

public class GzipMR extends Configured implements Tool {

    /**
     * Field separator constant (\001).
     * NOTE: the name keeps the historical "FILED" typo because this is a
     * public constant other code may already reference.
     */
    public static final String FILED_SPLIT_FLAG = "\001";

    /**
     * Mapper for the reduce path (param.is.reduce=true): emits each input
     * line keyed by the MD5 of its text, so identical lines are grouped
     * into the same reducer call.
     */
    public static class GzipMaper extends Mapper<LongWritable, Text, Text, Text> {
        // One mapper instance per task attempt, so reusing this key holder is
        // safe (the original shared a mutable static across mapper classes).
        private final Text md5Key = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            md5Key.set(MD5Util.getMd5(value.toString()));
            context.write(md5Key, value);
        }
    }

    /**
     * Pass-through mapper for the map-only path (param.is.reduce=false):
     * writes every line unchanged under a NullWritable key.
     * BUGFIX: the original also computed an MD5 per record and discarded the
     * result; that dead per-record work is removed here.
     */
    public static class GzipMaper2 extends Mapper<LongWritable, Text, NullWritable, Text> {
        private static final NullWritable KEY_NULL = NullWritable.get();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            context.write(KEY_NULL, value);
        }
    }

    /**
     * Reducer for the reduce path: re-emits every value of each MD5 group
     * under a NullWritable key. NOTE(review): it writes ALL values per key,
     * so duplicates are copied, not collapsed — behavior kept unchanged.
     */
    public static class GzipRducer extends Reducer<Text, Text, NullWritable, Text> {
        private static final NullWritable KEY_NULL = NullWritable.get();

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            for (Text t : values) {
                context.write(KEY_NULL, t);
            }
        }
    }

    /**
     * Accepts paths whose full string matches the given regex.
     * (Despite the "Exclude" in the name, any exclusion is encoded inside
     * the regex itself, e.g. via a negative lookbehind; the filter only
     * tests for a match.)
     * Made static: it needs no reference to the enclosing GzipMR instance.
     */
    private static class RegexExcludePathFilter implements PathFilter {
        private final String regex;

        public RegexExcludePathFilter(String regex) {
            this.regex = regex;
        }

        @Override
        public boolean accept(Path path) {
            System.out.println("forpath:" + path + "," + path.toString().matches(regex));
            return path.toString().matches(regex);
        }
    }

    /**
     * Configures and submits the gzip-compression job.
     *
     * Recognized configuration parameters:
     *   param.input.path     (required) input directory
     *   param.out.path       (required) output directory (deleted up front)
     *   param.is.reduce      "true" (default) = MD5-keyed map+reduce pass;
     *                        anything else = map-only pass-through
     *   param.is.filter.tmp  "true" = only take input files matching the
     *                        *.data glob filter; default "false"
     *
     * @param args unused; parameters come from the Configuration (ToolRunner -D)
     * @return 0 on success, 1 on failure or missing parameters
     * @throws Exception if job submission or HDFS access fails
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();

        // BUGFIX: these settings must be applied BEFORE Job.getInstance(conf).
        // Job copies the Configuration at creation time, so in the original
        // code the conf.set(...) calls after job creation were silently ignored.
        conf.set("mapreduce.output.fileoutputformat.compress", "true");
        conf.set("mapreduce.output.fileoutputformat.compress.type", "BLOCK");
        conf.set("mapreduce.output.fileoutputformat.compress.codec",
                "org.apache.hadoop.io.compress.GzipCodec");
        conf.set("io.compression.codecs",
                "org.apache.hadoop.io.compress.GzipCodec");

        Job job = Job.getInstance(conf);

        String inputDir = conf.get("param.input.path");
        String outDir = conf.get("param.out.path");
        String isReduce = conf.get("param.is.reduce", "true");
        String isFilterTmp = conf.get("param.is.filter.tmp", "false");
        if (inputDir == null || inputDir.isEmpty()) {
            System.out.println("请输入输入路径：param.input.path");
            // BUGFIX: was System.exit(0) — a misconfiguration must not report success.
            return 1;
        }
        if (outDir == null || outDir.isEmpty()) {
            System.out.println("请输入输出路径：param.out.path");
            return 1; // BUGFIX: was System.exit(0)
        }
        System.out.println("param.input.path=" + inputDir);
        System.out.println("param.out.path=" + outDir);

        Path outpath = new Path(outDir);
        HadoopUtils.delete(conf, outpath);

        if (isFilterTmp.equals("true")) {
            // BUGFIX: use the job's Configuration instead of a fresh one so
            // command-line options (e.g. -D fs.defaultFS=...) are honored.
            FileSystem fs = FileSystem.get(conf);
            FileStatus[] status = fs.globStatus(new Path(inputDir + "/*"),
                    new RegexExcludePathFilter(".*\\.data(?<!\\.tmp)$"));
            Path[] listedPaths = FileUtil.stat2Paths(status);
            if (listedPaths == null || listedPaths.length <= 0) {
                System.out.println("输入路径除去.tmp结尾的文件，没有其他文件！");
                return 1; // BUGFIX: was System.exit(0)
            }
            for (Path p : listedPaths) {
                System.out.println("文件：" + p.toString());
            }
            TextInputFormat.setInputPaths(job, listedPaths);
        } else {
            TextInputFormat.setInputPaths(job, new Path(inputDir));
        }

        TextOutputFormat.setOutputPath(job, new Path(outDir));
        FileOutputFormat.setCompressOutput(job, true);
        FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class); // gzip-compressed output

        job.setOutputKeyClass(Text.class);
        System.out.println("param.is.reduce=" + isReduce);
        if (isReduce.equals("true")) {
            job.setMapperClass(GzipMaper.class);
            job.setReducerClass(GzipRducer.class);
        } else {
            job.setMapOutputKeyClass(NullWritable.class);
            job.setMapperClass(GzipMaper2.class);
        }
        job.setJarByClass(GzipMR.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * CLI entry point; delegates to ToolRunner so the generic Hadoop options
     * (-D, -conf, -fs, ...) are parsed into this Tool's Configuration.
     */
    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new GzipMR(), args);
        System.exit(res);
    }

}
