package com.gome.ss.flowcrawl.extractor;

import java.util.Date;

import javax.jms.JMSException;

import org.apache.log4j.Logger;
import org.bson.types.BSONTimestamp;

import com.gome.ss.common.AppConfiguer;
import com.gome.ss.common.Utils;
import com.gome.ss.core.schedule.ThreadTaskBase;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.QueryOperators;


/**
 * Incremental extractor task that tails the MongoDB replica-set oplog
 * ({@code local.oplog.rs}) for insert operations on this extractor's
 * collection and forwards each inserted document to the JMS queue.
 * Runs until {@link #stop()} sets {@code needStop}.
 */
public class FlowCrawlExtractorIncrementalTask extends FlowCrawlExtractorTask implements ThreadTaskBase {
	private static Logger logger = Logger.getLogger(FlowCrawlExtractorIncrementalTask.class);

	/** Back-off between oplog polls when no new entries were found, to avoid a busy-wait. */
	private static final long POLL_INTERVAL_MS = 1000L;

	public FlowCrawlExtractorIncrementalTask(FlowCrawlExtractor extractor) {
		super(extractor);
	}

	/**
	 * Polls the oplog in a loop, starting from "now", and pushes every matching
	 * inserted document ({@code op == "i"}, matching namespace and stepID) onto
	 * the extractor queue, committing the JMS session after each document.
	 */
	@Override
	public void execute() {
		FlowCrawlExtractorConfig config = extractor.getConfig();

		DBCollection rplogCollection = AppConfiguer.flowCrawlConfigMongo.getDB("local")
				 .getCollection("oplog.rs");

		// Start tailing from the current time: only oplog entries strictly newer
		// than this timestamp are read.
		Date now = new Date();
		BSONTimestamp ts = new BSONTimestamp((int) (now.getTime() / 1000), 1);

		while (!needStop) {
			BasicDBObject query = new BasicDBObject();
			query.append("ts", new BasicDBObject(QueryOperators.GT, ts));
			query.append("ns", AppConfiguer.FLOWCRAWL_DB_NAME + "." + extractor.getConfig().getName());
			query.append("op", "i"); // insert operations only
			query.append("o.stepID", config.getStepID());

			DBObject opObj = null;
			// $natural sort preserves oplog (insertion) order.
			DBCursor cursor = rplogCollection.find(query).sort(new BasicDBObject("$natural", 1));
			try {
				while (cursor.hasNext() && !needStop) {
					opObj = cursor.next();
					try {
						if (opObj != null && opObj.get("o") != null) {
							putQueue((DBObject) opObj.get("o"));
							session.commit();
						}
					} catch (JMSException e) {
						// Log and keep tailing; a single failed enqueue must not kill the task.
						logger.error(Utils.getTrace(e));
					}
				}
			} finally {
				// FIX: the cursor was never closed, leaking server-side cursor resources
				// on every poll iteration.
				cursor.close();
			}

			if (opObj != null) {
				// Advance the resume point to the last entry we processed.
				ts = (BSONTimestamp) opObj.get("ts");
			} else if (!needStop) {
				// FIX: nothing new in the oplog — back off before re-querying instead of
				// spinning in a tight loop against the database.
				try {
					Thread.sleep(POLL_INTERVAL_MS);
				} catch (InterruptedException e) {
					// Preserve interrupt status; the outer loop re-checks needStop.
					Thread.currentThread().interrupt();
				}
			}
		}

		close();
		extractor.getStatus().stop();
	}

	/** Signals {@link #execute()} to finish its current pass and shut down. */
	@Override
	public void stop() {
		logger.info(extractor.getConfig().getName() + " incremental extract db task stop.");
		needStop = true;
	}

	/**
	 * Ad-hoc manual test: tails the oplog for inserts into the "weixin"
	 * collection at stepID "4" and prints each document's URL plus the
	 * advancing resume timestamp. Runs until interrupted.
	 */
	public static void main(String[] args) {
		AppConfiguer.initFlowCrawlSpider();

		DBCollection rplogCollection = AppConfiguer.flowCrawlConfigMongo.getDB("local")
				 .getCollection("oplog.rs");

		Date now = new Date();
		BSONTimestamp ts = new BSONTimestamp((int) (now.getTime() / 1000), 1);
		while (true) {
			BasicDBObject query = new BasicDBObject();
			query.append("ts", new BasicDBObject(QueryOperators.GT, ts));
			query.append("ns", AppConfiguer.FLOWCRAWL_DB_NAME + "." + "weixin");
			query.append("op", "i");
			query.append("o.stepID", "4");

			DBObject opObj = null;
			DBCursor cursor = rplogCollection.find(query).sort(new BasicDBObject("$natural", 1));
			try {
				while (cursor.hasNext()) {
					opObj = cursor.next();
					if (opObj != null && opObj.get("o") != null) {
						DBObject data = (DBObject) opObj.get("o");
						System.out.println((String) data.get("url"));
					}
				}
			} finally {
				// FIX: close the cursor each pass (was leaked in the original).
				cursor.close();
			}

			if (opObj != null) {
				ts = (BSONTimestamp) opObj.get("ts");
				System.out.println(ts.getTime() + " " + ts.getInc());
			} else {
				// FIX: avoid busy-polling when the oplog has nothing new.
				try {
					Thread.sleep(POLL_INTERVAL_MS);
				} catch (InterruptedException e) {
					Thread.currentThread().interrupt();
					break; // interrupted: stop the manual test loop
				}
			}
		}
	}
}
