package com.gome.extract;

import java.io.IOException;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.log4j.Logger;

import com.gome.extract.common.AppConfiguer;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.Mongo;
import com.mongodb.MongoOptions;
import com.mongodb.ServerAddress;
import com.mongodb.util.JSON;

public class SpiderDataMapper extends TableMapper<Text, IntWritable> {
	private static Logger logger = Logger.getLogger(SpiderDataMapper.class);

	// Shared Mongo client for every mapper instance in this JVM; the driver
	// pools connections internally, so one static client is intentional.
	public static Mongo flowCrawlConfigMongo = null;
	static {
		try {
			MongoOptions options = new MongoOptions();
			options.connectionsPerHost = 100;
			options.autoConnectRetry = true;
			options.threadsAllowedToBlockForConnectionMultiplier = 5;
			options.slaveOk = true; // allow reads from secondaries
			ServerAddress serverAddress = new ServerAddress(
					AppConfiguer.getStringByKey("mongodb_host"),
					AppConfiguer.getIntByKey("mongodb_port"));
			flowCrawlConfigMongo = new Mongo(serverAddress, options);
		} catch (UnknownHostException e) {
			// Client stays null; map() then fails fast on first use while the
			// root cause is preserved in the task log.
			logger.error(AppConfiguer.getTrace(e));
		}
	}

	private Text word = new Text("count");
	private final static IntWritable one = new IntWritable(1);

	// Column families read from the scanned HBase row.
	private static final byte[] INFO_FAMILY = "info".getBytes();
	private static final byte[] PARENT_FAMILY = "parent".getBytes();

	/**
	 * Converts one crawled-page HBase row into a JSON document, inserts it
	 * into the MongoDB collection named by the row's info:crawlName column
	 * (database "flowcrawl"), and emits ("count", 1) so the job can total
	 * the processed rows.
	 *
	 * @param key     HBase row key (unused beyond the framework contract)
	 * @param value   scanned row; info:* columns hold the page data, optional
	 *                parent:* columns describe the page this one was found on
	 * @param context MapReduce context used to emit the row count
	 * @throws IOException          on a cell that is not valid UTF-8
	 * @throws InterruptedException if the emit is interrupted
	 */
	public void map(ImmutableBytesWritable key, Result value, Context context)
			throws IOException, InterruptedException {

		String url = readColumn(value, INFO_FAMILY, "url");
		String id = readColumn(value, INFO_FAMILY, "id");
		String title = readColumn(value, INFO_FAMILY, "title");
		String stepID = readColumn(value, INFO_FAMILY, "stepID");
		String pageSource = readColumn(value, INFO_FAMILY, "pageSource");
		String cookies = readColumn(value, INFO_FAMILY, "cookies");
		String failedAction = readColumn(value, INFO_FAMILY, "failedAction");
		String failedWait = readColumn(value, INFO_FAMILY, "failedWait");
		String crawlName = readColumn(value, INFO_FAMILY, "crawlName");

		Map<String, String> pageInfo = new HashMap<String, String>();

		// parent:* columns exist only for pages discovered via another page.
		if (value.getValue(PARENT_FAMILY, "id".getBytes()) != null) {
			pageInfo.put("id", readColumn(value, PARENT_FAMILY, "id"));
			pageInfo.put("url", readColumn(value, PARENT_FAMILY, "url"));
			pageInfo.put("qn", readColumn(value, PARENT_FAMILY, "qn"));
			pageInfo.put("stepID", readColumn(value, PARENT_FAMILY, "stepID"));
			pageInfo.put("cookies", readColumn(value, PARENT_FAMILY, "cookies"));
		}

		SpiderDataMap dataMap = new SpiderDataMap();
		dataMap.add("id", id);
		dataMap.add("stepID", stepID);
		dataMap.add("title", title);
		dataMap.add("url", url);
		// BUG FIX: the original stored pageInfo under both keys below, silently
		// dropping the decoded failedAction/failedWait columns.
		dataMap.add("failedAction", failedAction);
		dataMap.add("failedWait", failedWait);
		dataMap.add("cookies", cookies);
		dataMap.add("pageSource", pageSource);
		dataMap.add("parent", pageInfo);

		DBObject obj = (BasicDBObject) JSON.parse(dataMap.toJson());
		DBCollection pageCollection = flowCrawlConfigMongo.getDB("flowcrawl").getCollection(crawlName);
		pageCollection.insert(obj);

		context.write(word, one);
	}

	/**
	 * Reads family:qualifier from the row and decodes it as UTF-8.
	 *
	 * @return the decoded cell text, or null when the cell is absent (the
	 *         original code threw a NullPointerException on missing cells)
	 * @throws IOException if the bytes are not valid UTF-8
	 */
	private static String readColumn(Result value, byte[] family, String qualifier)
			throws IOException {
		byte[] raw = value.getValue(family, qualifier.getBytes());
		return raw == null ? null : Text.decode(raw);
	}
}
