package mr.fileToDb;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import utils.CSVFileUtil;

/**
 *
 * @author 李岩飞
 * @email eliyanfei@126.com
 * 2017年7月28日 上午9:43:34
 *
 */
/**
 * Mapper that converts CSV lines of scene-KPI data into {@link EsbMRBean} records,
 * keyed by the bean's own key, for the file-to-DB MapReduce job.
 */
public class SceneKpiMapper extends Mapper<LongWritable, Text, Text, EsbMRBean> {
	private static final Logger logger = LoggerFactory.getLogger(SceneKpiMapper.class);

	/**
	 * Parses one CSV input line into an {@link EsbMRBean} and emits it keyed by
	 * {@code bean.getKey()}. The header row (any line starting with "sceneId") is
	 * skipped. A malformed line is logged and dropped rather than failing the job.
	 *
	 * @param key byte offset of the line within the input split (unused)
	 * @param line raw CSV line
	 * @param context Hadoop context used to emit the (key, bean) pair
	 * @throws IOException if emitting the record fails
	 * @throws InterruptedException if the task is interrupted while writing
	 */
	@Override
	protected void map(LongWritable key, Text line, Context context) throws IOException, InterruptedException {
		final String str = line.toString();
		// Skip the CSV header row.
		if (str.startsWith("sceneId")) {
			return;
		}
		try {
			final String[] cols = CSVFileUtil.parseLine(str);
			final EsbMRBean bean = new EsbMRBean();
			bean.parse(cols);
			context.write(new Text(bean.getKey()), bean);
		} catch (Exception e) {
			// Log (with the offending line and full stack trace) and skip bad
			// records instead of printing to stderr or aborting the whole job.
			logger.error("Failed to parse line: {}", str, e);
		}
	}

	/**
	 * Logs mapper-task completion after the default cleanup runs.
	 */
	@Override
	protected void cleanup(Context context) throws IOException, InterruptedException {
		super.cleanup(context);
		logger.info("清空mapper任务");
	}

}