package crossjob;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Set;

/**
 * Created by zhaopeng on 2/24/17.
 */
/**
 * Mapper that filters MultipleInputs files by the numeric tag id encoded in the
 * file name prefix ("&lt;tagId&gt;-..."). For files whose tag id is in the accepted
 * set, every input line is emitted as (line, tagId); all other files are skipped.
 */
public class CrossJobLogMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    /** Tag ids whose input files should be passed through; everything else is ignored. */
    private static final int[] TAGS = {562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 12021, 572, 11261,
            11262, 11263, 11264, 11265, 11266, 11267, 11268, 11269, 11270, 11271, 11272, 11273, 11274};

    /** TAGS as a set for O(1) membership tests; populated once per task in setup(). */
    private final Set<Integer> acceptedTags = new HashSet<>();

    /** Reused output value holding the tag id parsed from the current split's file name. */
    private final IntWritable writeValue = new IntWritable();

    /** True when the current split's file carries an accepted tag id. */
    private boolean iscontinue;

    /**
     * Resolves the current split's file name, extracts the leading numeric tag id,
     * and decides whether map() should emit records for this split.
     *
     * @throws IOException if the reflective unwrap of a TaggedInputSplit fails
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        acceptedTags.clear();
        for (int tag : TAGS) {
            acceptedTags.add(tag);
        }

        // See http://stackoverflow.com/questions/11130145/hadoop-multipleinputs-fails-with-classcastexception
        // With MultipleInputs the split is a TaggedInputSplit, which cannot be cast to
        // FileSplit directly; reflection is needed to unwrap the real FileSplit.
        InputSplit split = context.getInputSplit();
        Class<? extends InputSplit> splitClass = split.getClass();

        FileSplit fileSplit = null;
        if (splitClass.equals(FileSplit.class)) {
            fileSplit = (FileSplit) split;
        } else if (splitClass.getName().equals(
                "org.apache.hadoop.mapreduce.lib.input.TaggedInputSplit")) {
            // begin reflection hackery...
            try {
                Method getInputSplitMethod = splitClass.getDeclaredMethod("getInputSplit");
                getInputSplitMethod.setAccessible(true);
                fileSplit = (FileSplit) getInputSplitMethod.invoke(split);
            } catch (Exception e) {
                // Wrap and re-throw, preserving the cause for diagnosability.
                throw new IOException(e);
            }
            // end reflection hackery
        }

        // Fix: the original dereferenced fileSplit unconditionally (NPE when the split
        // type is neither FileSplit nor TaggedInputSplit) and called substring with
        // indexOf("-") == -1 outside the try/catch when the file name has no dash
        // (StringIndexOutOfBoundsException). Skip such splits instead of crashing.
        iscontinue = false;
        if (fileSplit == null) {
            return;
        }
        String fileName = fileSplit.getPath().getName();
        int dash = fileName.indexOf('-');
        if (dash < 0) {
            return;
        }
        try {
            int tagValue = Integer.parseInt(fileName.substring(0, dash));
            iscontinue = acceptedTags.contains(tagValue);
            if (iscontinue) {
                writeValue.set(tagValue);
            }
        } catch (NumberFormatException ignored) {
            // File name prefix is not a numeric tag id — skip the whole split.
        }
    }

    /**
     * Emits (line, tagId) for every record of an accepted file; no-op otherwise.
     */
    @Override
    public void map(LongWritable key, Text values, Context context) throws IOException, InterruptedException {
        if (iscontinue) {
            context.write(values, writeValue);
        }
    }
}
