package com.caul.demo.hadoop.mapreduce.idx;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Created by sdliang on 2018/3/31.
 */
public class WordIndexMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

  // Per-task state initialized in setup(). These are instance fields, not static:
  // static mutable fields are shared across all mapper instances in the same JVM
  // and can leak state between tasks when JVM reuse is enabled.
  private String fileName;
  private String contWord;
  private Pattern pattern;
  private Text textKey;
  private IntWritable intValue;

  /**
   * Caches the name of the file this split belongs to (used as the output key)
   * and compiles the configured search word into a reusable {@link Pattern}.
   *
   * @throws IllegalStateException if the {@code CONT_WORD} configuration entry is absent
   */
  @Override
  protected void setup(Context context) throws IOException, InterruptedException {
    // Resolve the file name of the input split; it becomes the output key.
    FileSplit split = (FileSplit) context.getInputSplit();
    fileName = split.getPath().getName();
    contWord = context.getConfiguration().get(WordIndexConstant.CONT_WORD);
    if (contWord == null || contWord.isEmpty()) {
      throw new IllegalStateException(
          "Missing required configuration: " + WordIndexConstant.CONT_WORD);
    }
    // Quote the word so it is matched literally. The original compiled the raw
    // string as a regex, which disagreed with the literal contains() pre-check
    // in map() whenever the word contained regex metacharacters.
    pattern = Pattern.compile(Pattern.quote(contWord));
    textKey = new Text(fileName);
    intValue = new IntWritable();
    System.out.println("statistic file: " + fileName);
  }

  /**
   * Emits {@code (fileName, occurrenceCount)} for every input line that contains
   * the configured word at least once; blank lines and lines without the word
   * produce no output.
   */
  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {

    String line = value.toString().trim();
    if (line.isEmpty()) {
      return;
    }
    // Counting directly subsumes the former contains() pre-check and avoids
    // scanning the line twice.
    int count = getCountLine(line);
    if (count > 0) {
      intValue.set(count);
      context.write(textKey, intValue);
    }
  }

  /** Returns the number of literal occurrences of the configured word in {@code lineStr}. */
  private int getCountLine(String lineStr) {
    int count = 0;
    Matcher matcher = pattern.matcher(lineStr);
    while (matcher.find()) {
      count++;
    }
    return count;
  }
}
