package com.linkstec.mot.service;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSON;
import com.linkstec.mot.business.IDealData;
import com.linkstec.mot.cons.Constants;
import com.linkstec.mot.dao.DataBatchDao;
import com.linkstec.mot.queue.DataQueue;
import com.linkstec.mot.util.ConfigUtil;
import com.linkstec.mot.util.ExceptionTracker;
import com.linkstec.mot.util.SpringContextUtil;
import com.linkstec.mot.util.StringUtil;

/**
 * Worker thread: polls records from the shared {@link DataQueue}, converts
 * each one to a column map (via a user-configured {@link IDealData}
 * implementation, or JSON parsing for plain strings), accumulates them into
 * batches, and commits each batch to the database.
 *
 * @author PENGYAO
 */
public class DataWorkerService implements Runnable {

	private static final Logger logger = LoggerFactory
			.getLogger(DataWorkerService.class);

	private final DataBatchDao dataBatchDao = SpringContextUtil
			.getBean("dataBatchDao");

	/**
	 * Number of consecutive empty polls (100 ms each) to tolerate before a
	 * partially filled batch is flushed anyway, so a trailing record is not
	 * stuck waiting for the batch to fill up.
	 */
	private static int checkTimes = 3;

	@Override
	public void run() {
		// Fail fast when the insert SQL is missing from configuration.
		if (StringUtil.isNullOrEmpty(sql)) {
			logger.error(String.format("未配置入库SQL，EXP:%s",
					"INSERT INTO TABLE_NAME(COLUMN_A,COLUMN_B) "
							+ "VALUES(#{columnA},#{columnB})"));
			logger.error("dataWorker线程退出运行...");
			return;
		}
		// Parse the batch size once, not on every queue item; a bad value
		// previously threw NumberFormatException per record, silently
		// discarding each batch in the catch block below.
		final int batchDealNum;
		try {
			batchDealNum = Integer.parseInt(ConfigUtil
					.getProperty("batchDealNum"));
		} catch (NumberFormatException e) {
			logger.error("invalid batchDealNum config, dataWorker exits: "
					+ ExceptionTracker.trace(e));
			return;
		}
		int checknum = 0;
		List<Map<String, Object>> batchData = new ArrayList<Map<String, Object>>();
		String all = ConfigUtil.getProperty("all");
		// Run until interrupted; the interrupt flag is restored below so this
		// condition actually observes a requested shutdown.
		while (!Thread.currentThread().isInterrupted()) {
			try {
				Object temp = DataQueue.poll();
				if (temp != null) {
					Map<String, Object> mapData = null;
					if (dealData != null) {
						mapData = dealData.dealData(temp);
					} else if (temp instanceof String) {
						mapData = JSON.parseObject((String) temp);
					}
					if (null == mapData || mapData.isEmpty()) {
						// The transformer returned nothing: this record is
						// deliberately skipped and never enters the batch.
						continue;
					}
					batchData.add(mapData);
					// Counting via batchData.size() (instead of a separate
					// loopCount) keeps the counter correct after an
					// idle-timeout flush, which previously left the old count
					// behind and triggered a premature tiny-batch flush.
					if (batchData.size() >= batchDealNum) {
						flushBatch(batchData, all);
					}
					checknum = 0;
				} else {
					Thread.sleep(100);
					if (!batchData.isEmpty()) {
						checknum++;
						if (checknum >= checkTimes) {
							flushBatch(batchData, all);
							checknum = 0;
						}
					}
				}
			} catch (InterruptedException e) {
				// Restore the interrupt flag so the loop condition exits;
				// the previous blanket catch swallowed this and made the
				// thread unstoppable.
				Thread.currentThread().interrupt();
			} catch (Exception e) {
				// Drop the current batch to avoid retrying a poison record
				// forever (original behavior, kept intentionally).
				batchData.clear();
				checknum = 0;
				logger.error(ExceptionTracker.trace(e));
			}
		}
		// Best-effort flush of whatever is pending at shutdown so interrupted
		// workers do not silently lose the tail of the queue.
		if (!batchData.isEmpty()) {
			try {
				flushBatch(batchData, all);
			} catch (Exception e) {
				logger.error(ExceptionTracker.trace(e));
			}
		}
	}

	/**
	 * Inserts the accumulated batch — full-column mode when the "all" config
	 * flag is "true", otherwise the configured-column insert — then clears the
	 * list so it can be reused.
	 *
	 * @param batchData rows pending insertion; emptied on success
	 * @param all       value of the "all" configuration property
	 */
	private void flushBatch(List<Map<String, Object>> batchData, String all) {
		if ("true".equals(all)) {
			dataBatchDao.batchInsertAll(batchData);
		} else {
			dataBatchDao.batchInsert(batchData);
		}
		batchData.clear();
	}

	// Insert SQL template loaded once at class load; run() refuses to start
	// without it.
	private static String sql = null;
	// Optional record transformer; when absent, String payloads are parsed as
	// JSON column maps instead.
	private static IDealData dealData = null;

	static {
		sql = ConfigUtil.getProperty("sql");
		String dealDataPath = ConfigUtil.getProperty("dealdata");
		if (!StringUtil.isNullOrEmpty(dealDataPath)) {
			try {
				dealData = (IDealData) Class.forName(dealDataPath)
						.newInstance();
				logger.info("data worker dealdata，classpath:" + dealDataPath);
			} catch (InstantiationException | IllegalAccessException
					| ClassNotFoundException e) {
				// A bad classpath is logged but non-fatal: the worker falls
				// back to JSON parsing of String records.
				logger.error(ExceptionTracker.trace(e));
			}
		}
	}

}
