package com.mall.hadoop.job.dataclear;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.mall.common.dao.MongoDBManager;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * 清洗数据mapper，保存mongodb
 *
 * <p>Parses each input line as a JSON envelope, flattens every element of its
 * {@code data_list} array (tagging each record with the envelope's
 * {@code commit_time} and {@code system_type}), then batch-inserts the records
 * into the MongoDB collection selected by {@code system_type}
 * (HTML / PC / anything else → sdk).
 *
 * <p>Note: this mapper never writes to the MapReduce context; its output is the
 * MongoDB side effect only.
 *
 * @author tgy
 */
public class DataSourceMap extends Mapper<LongWritable, Text, Text, IntWritable> {
	private static final Logger logger = LoggerFactory.getLogger(DataSourceMap.class);

	/**
	 * Processes one raw JSON line.
	 *
	 * @param key     byte offset of the line in the input split (unused)
	 * @param value   raw JSON text of one envelope
	 * @param context MapReduce context (unused — output goes to MongoDB)
	 */
	@Override
	public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
		List<DBObject> list = new ArrayList<DBObject>();
		try {
			@SuppressWarnings("unchecked")
			Map<String, Object> map = JSONObject.fromObject(value.toString());

			// Look up data_list directly instead of scanning the whole key set.
			Object dataList = map.get("data_list");
			if (dataList != null) {
				JSONArray arr = JSONArray.fromObject(dataList.toString());
				for (int i = 0; i < arr.size(); i++) {
					@SuppressWarnings("unchecked")
					Map<String, Object> data = JSONObject.fromObject(arr.get(i));
					// Copy the record and stamp it with the envelope metadata.
					Map<String, Object> mong = new HashMap<String, Object>(data);
					mong.put("commit_time", map.get("commit_time"));
					mong.put("system_type", map.get("system_type"));
					list.add(new BasicDBObject(mong));
				}
			}

			// Nothing to insert: avoid an empty batch write.
			if (list.isEmpty()) {
				return;
			}

			Object systemType = map.get("system_type");
			if (systemType == null) {
				// Previously this was an NPE that silently dropped the batch;
				// make the drop explicit and visible in the logs.
				logger.warn("missing system_type, skipping batch of {} records", list.size());
				return;
			}
			// Constant-on-left equals is null-safe by construction.
			if ("HTML".equals(systemType)) {
				MongoDBManager.insertBatchDBObject("html_collection", list);
			} else if ("PC".equals(systemType)) {
				MongoDBManager.insertBatchDBObject("pc_collection", list);
			} else {
				MongoDBManager.insertBatchDBObject("sdk_collection", list);
			}
		} catch (Exception e) {
			// Log at ERROR with the full stack trace; a bad record must not kill the job,
			// but it must not vanish at INFO level either.
			logger.error("解析数据出现异常：", e);
		}
	}
}
