package club.drguo.hadoop.mapreduce.enhance;

import java.io.IOException;
import java.util.HashMap;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * Reads one log line, extracts the URL (field 26), looks it up in the rule
 * map, and appends the enrichment info to the original line. URLs with no
 * match are emitted to a "to-crawl" list instead. Map-only job — no reduce
 * aggregation is needed.
 * 
 * @author guo
 */
public class LogEnhanceMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
	// url -> enrichment info; loaded once per map task in setup()
	private HashMap<String, String> ruleMap = new HashMap<>();
	// Reused output key — avoids allocating a new Text object per record
	private Text outKey = new Text();

	// Called exactly once when the mapper task is initialized
	@Override
	protected void setup(Mapper<LongWritable, Text, Text, NullWritable>.Context context)
			throws IOException, InterruptedException {
		DBLoader.dbLoader(ruleMap);
	}

	@Override
	protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, NullWritable>.Context context)
			throws IOException, InterruptedException {
		String line = value.toString();
		String[] fields = StringUtils.split(line, "\t");
		try {
			// Field 26 holds the URL; only process records that are long enough
			// and actually carry an http URL.
			if (fields.length > 30 && StringUtils.isNotEmpty(fields[26]) && fields[26].startsWith("http")) {
				String url = fields[26];
				// Look up the rule map; append the info when a rule matches.
				String info = ruleMap.get(url);
				String result;
				if (info != null) {
					result = line + "\t" + info + "\r\n";
				} else {
					// No rule matched: emit the URL to the to-crawl list
					result = url + "\t" + "tocrawl" + "\r\n";
				}
				outKey.set(result);
				context.write(outKey, NullWritable.get());
			}
		} catch (Exception e) {
			// Don't silently swallow — count malformed records via a Hadoop
			// counter and report the offending line plus the cause, without
			// failing the whole task on a single bad input line.
			context.getCounter("LogEnhance", "MALFORMED_RECORDS").increment(1);
			System.err.println("LogEnhanceMapper failed on line: " + line + " : " + e);
		}
	}
}
