package com.whoami.dataplatform.tag_combine.old;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsBinaryInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.FileInputStream;
import java.io.IOException;
import java.util.BitSet;
import java.util.Properties;

/**
 * Consumes the previous job's sequence-file output of (tid_bits -> uid) pairs
 * and writes one text line per distinct tid_bits key:
 * <pre>bitSetString \t valueCount</pre>
 * i.e. it counts how many uid values share each tid_bits key.
 *
 * <p>NOTE(review): the original comment described the output value as
 * "count, uid \n uid \n uid", but {@link SimpleReducer} only emits the
 * count — confirm which contract downstream consumers expect.
 *
 * @author tzp
 * @since 2019/7/31
 */
@SuppressWarnings("Duplicates")
public class TidbitsCombine1 extends Configured implements Tool {
    /**
     * Config key naming a properties file whose entries are copied into the
     * job {@link Configuration} before submission.
     */
    public static final String PARAMS_MR_PROP = "mr_properties";

    /**
     * Configures and submits the counting job.
     *
     * @param args {@code args[0]} = date (e.g. 20190723, used only in the job
     *             name), {@code args[1]} = input path, {@code args[2]} = output
     *             base directory
     * @return 0 on success, 1 on job failure, 2 on bad usage
     * @throws Exception if the properties file cannot be read or job
     *                   submission fails
     */
    @Override
    public int run(String[] args) throws Exception {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException when arguments are missing.
        if (args.length < 3) {
            System.err.println("Usage: " + this.getClass().getSimpleName()
                    + " <date(yyyyMMdd)> <input> <output>");
            return 2;
        }

        Configuration conf = getConf();
        String mrProFile = conf.get(PARAMS_MR_PROP);
        if (StringUtils.isNotEmpty(mrProFile)) {
            Properties mrProperties = new Properties();
            // try-with-resources: the original leaked this stream.
            try (FileInputStream in = new FileInputStream(mrProFile)) {
                mrProperties.load(in);
            }
            // stringPropertyNames()/getProperty() is the type-safe way to
            // iterate a Properties instance (includes defaults, avoids casts).
            for (String key : mrProperties.stringPropertyNames()) {
                conf.set(key, mrProperties.getProperty(key));
            }
        }

        String date = args[0];   // e.g. 20190723
        String input = args[1];  // sequence-file input from the previous job
        String output = args[2]; // output base directory

        Job job = Job.getInstance(conf, this.getClass().getSimpleName() + " " + date);
        job.setJarByClass(this.getClass());

        // Input: raw key/value bytes of the upstream sequence files.
        job.setInputFormatClass(SequenceFileAsBinaryInputFormat.class);
        FileInputFormat.addInputPath(job, new Path(input));

        // Identity mapper: passes (tid_bits, uid) bytes straight to the shuffle.
        job.setMapperClass(Mapper.class);
        job.setMapOutputKeyClass(BytesWritable.class);
        job.setMapOutputValueClass(BytesWritable.class);

        // Reducer counts the values grouped under each key.
        job.setReducerClass(SimpleReducer.class);
        job.setNumReduceTasks(8);

        // Output: plain-text "key \t value" lines.
        job.setOutputFormatClass(TextOutputFormat.class);
        FileOutputFormat.setOutputPath(job, new Path(output));

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Emits one line per key: the BitSet string form of the key's bytes,
     * tab-separated from the number of values grouped under that key.
     */
    public static class SimpleReducer extends Reducer<BytesWritable, BytesWritable, Text, Text> {
        // Reused across reduce() calls — standard Hadoop practice to avoid
        // allocating a Writable per output record.
        Text outKey = new Text();
        Text outValue = new Text();

        @Override
        protected void reduce(BytesWritable key, Iterable<BytesWritable> values, Context context) throws IOException, InterruptedException {
            // copyBytes() trims the backing array to the valid length;
            // getBytes() alone could include stale padding.
            BitSet bitSet = BitSet.valueOf(key.copyBytes());
            outKey.set(bitSet.toString());
            // Only the count of values is emitted; the values themselves
            // are intentionally ignored.
            int count = 0;
            for (BytesWritable ignored : values) {
                count++;
            }
            outValue.set(String.valueOf(count));
            context.write(outKey, outValue);
        }
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new TidbitsCombine1(), args));
    }
}
