package com.mall.hadoop.job.babytop;

import java.util.HashMap;
import java.util.Map;

import net.sf.json.JSONObject;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.mall.hadoop.job.dataclear.DataSourceMap;
import com.mall.manager.service.BabyTopService;

/**
 * Baby single-topic data cleaning mapper (宝宝单题数据清洗).
 *
 * <p>Each input line is expected to be a JSON object. The mapper renames its
 * fields (e.g. {@code t_id} -> {@code topic_id}) into a cleaned row and
 * immediately persists that single row via {@link BabyTopService#insertBatch}.
 * Nothing is emitted to the MapReduce context — all output goes to RDS.
 *
 * @author tgy
 */
public class BabyTopMap extends Mapper<LongWritable, Text, Text, Text> {

	// Kept public for backward compatibility; now final and bound to the
	// correct class (was mistakenly DataSourceMap.class, mis-attributing logs).
	public static final Logger logger = LoggerFactory.getLogger(BabyTopMap.class);

	/**
	 * Cleans one JSON line and inserts it into RDS.
	 *
	 * @param key     byte offset of the line in the input split (unused)
	 * @param value   one JSON-encoded record; assumed to contain the t_* /
	 *                baby_* / garden_* / teacher_* fields — TODO confirm schema
	 * @param context MapReduce context (unused; no key/value pairs are emitted)
	 */
	@Override
	public void map(LongWritable key, Text value, Context context) {

		// net.sf.json's JSONObject implements Map; the cast is unavoidable
		// but safe here, so suppress at the narrowest scope.
		@SuppressWarnings("unchecked")
		Map<String, Object> source = JSONObject.fromObject(value.toString());

		Map<String, Object> row = new HashMap<String, Object>();
		row.put("topic_id", source.get("t_id"));
		row.put("topic_name", source.get("t_title"));
		row.put("baby_id", source.get("baby_id"));
		row.put("baby_name", source.get("baby_name"));
		row.put("garden_id", source.get("garden_id"));
		row.put("garden_name", source.get("garden_name"));
		row.put("teacher_id", source.get("teacher_id"));
		row.put("teacher_name", source.get("teacher_name"));
		row.put("topic_time", source.get("t_time"));
		// Guard against a missing "system_type" field: the original
		// unconditional toString() would NPE and kill the whole map task.
		Object systemType = source.get("system_type");
		row.put("data_type", systemType == null ? null : systemType.toString());
		row.put("commit_time", source.get("commit_time"));

		// One cleaned row per input line; insert it immediately.
		// (The previous dm.size()>0 check was always true and has been removed.)
		Map<String, Map<String, Object>> batch = new HashMap<String, Map<String, Object>>();
		batch.put("1", row);
		try {
			BabyTopService.insertBatch(batch);
		} catch (Exception e) {
			// Log at error level with the full stack trace so failures are
			// diagnosable; the original logged info-level with e.toString()
			// only, dropping the cause.
			logger.error("批量插入宝宝单题信息出现异常", e);
		}
	}
}






