package com.winning.bi.job;

import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.bson.Document;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.PersistJobDataAfterExecution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.support.ClassPathXmlApplicationContext;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import com.winning.bi.cache.CacheUtil;
import com.winning.bi.data.Message;
import com.winning.bi.jdbc.QuartzUtils;
import com.winning.bi.jdbc.ScheduleJob;
import com.winning.bi.jdbc.dao.Dao;

import mongo.MongoDbUtil;

/**
 * Quartz job that performs paged incremental extraction from a source table.
 *
 * <p>On each firing it compares the table's current max rowversion (hex string)
 * with the last processed one (cached in Ehcache, with the persisted JobDataMap
 * value as fallback). When new data exists, it pages through the changed rows
 * with an OFFSET/FETCH query and wraps each row in a {@link Message} for the
 * configured target table.
 *
 * @author 张大川 (Zhang Dachuan)
 */
@DisallowConcurrentExecution
@PersistJobDataAfterExecution
public class SimpleSqlJob implements Job {
	private static final Logger log = LoggerFactory.getLogger(SimpleSqlJob.class);

	// Rows fetched per page of the OFFSET ... FETCH NEXT query.
	static long page = 200;

	// NOTE(review): these two fields are only used as scratch space inside
	// execute(); they are kept for backward compatibility but should become
	// locals — Quartz may instantiate a fresh job object per execution, so
	// instance state buys nothing here.
	Map<String, Object> map2;
	String sql;

	/**
	 * Wraps each row map in an envelope document ({@code loadtime},
	 * {@code content}, {@code taskname}) and writes the whole batch to the
	 * {@code test.log} Mongo collection in a single {@code insertMany} round
	 * trip (previously one {@code insertOne} per row).
	 *
	 * @param list     row maps to persist; an empty list is a no-op
	 * @param taskName task identifier stamped on every document
	 */
	void saveToMongo(List<Map<String, Object>> list, String taskName) {
		String loadTime = LocalDateTime.now().toString();
		List<Document> batch = new ArrayList<Document>(list.size());
		for (Map<String, Object> row : list) {
			Document documentDetail = new Document();
			documentDetail.put("loadtime", loadTime);
			documentDetail.put("content", row);
			documentDetail.put("taskname", taskName);
			batch.add(documentDetail);
		}
		// insertMany rejects an empty batch, so guard first.
		if (!batch.isEmpty()) {
			MongoDbUtil.getDatabase("test").getCollection("log").insertMany(batch);
		}
	}

	/**
	 * Detects whether the source table changed since the last run and, if so,
	 * pages through the changed rows and converts each into a {@link Message}.
	 *
	 * @param context Quartz context carrying the {@code scheduleJob} config,
	 *                the Spring {@code ac} application context and the
	 *                persisted {@code rowversion} watermark
	 */
	public void execute(JobExecutionContext context) {
		ScheduleJob job = (ScheduleJob) context.getMergedJobDataMap().get("scheduleJob");
		Map<String, Object> properties = job.getMap();
		ClassPathXmlApplicationContext ac = (ClassPathXmlApplicationContext) context.getMergedJobDataMap().get("ac");
		Dao dao = (Dao) ac.getBean("daoSource");

		// Last processed rowversion: prefer the Ehcache copy, fall back to the
		// value persisted in the JobDataMap via @PersistJobDataAfterExecution.
		CacheUtil.init();
		String rowversion = CacheUtil.getCache().get(properties.get("jobid").toString());
		log.info("[Ehcache]{}", rowversion);
		if (StringUtils.isBlank(rowversion)) {
			rowversion = context.getJobDetail().getJobDataMap().get("rowversion").toString();
		}

		// Current high-water mark of the source table, as a hex string.
		byte[] bs = dao.getMaxVersion(properties.get("sourcetable").toString());
		String now = QuartzUtils.bytesToHexString(bs);
		log.info("[rowversion]{}", now); // was System.out.println
		CacheUtil.getCache().put(properties.get("jobid").toString(), now);
		CacheUtil.close();
		context.getJobDetail().getJobDataMap().put("rowversion", now);

		// Hex strings of equal length compare lexicographically in numeric
		// order — NOTE(review): assumes both sides are fixed-width (e.g. an
		// 8-byte rowversion); confirm bytesToHexString never varies in length.
		if (StringUtils.compareIgnoreCase(now, rowversion) <= 0) {
			log.info("无数据更新");
			return;
		}
		log.info("有数据更新");

		// Column mapping {"sourceColumn": "targetColumn", ...} from job config.
		String mapString = properties.get("mapping").toString();
		Map<String, String> ll = JSON.parseObject(mapString, new TypeReference<Map<String, String>>() {
		});
		List<String> columList = new ArrayList<String>(ll.keySet()); // select list
		String string = StringUtils.join(columList, ", ");
		if (!ll.containsKey("timestamp")) {
			// timestamp is always selected: the window filter and ORDER BY need it.
			string = string + ", timestamp";
		}

		// WARNING(review): SQL is assembled by concatenation. The inputs come
		// from job configuration and hex rowversions rather than end users,
		// but this should move to parameterized queries if the Dao API allows.
		String source = properties.get("sourcetable").toString();
		String countSql = "Select count(*) from " + source + " WHERE  timestamp BETWEEN "
				+ rowversion + " AND " + now;
		long total = dao.getCountNumber(countSql);

		String sqltemp = "select " + string + " from " + source
				+ " WHERE  timestamp BETWEEN " + rowversion + " AND " + now + " order by timestamp " + " OFFSET  "
				+ "_off_" + " rows fetch next " + page + " rows only";
		// Ceiling division: the old `total / page + 1` issued one guaranteed
		// empty query whenever total was an exact multiple of page (or zero).
		long pageCount = (total + page - 1) / page;

		Map<String, String> map = dao.getMetaData(source);
		for (long i = 0; i < pageCount; i++) {
			sql = StringUtils.replace(sqltemp, "_off_", String.valueOf(i * page));
			log.info("sql:{}", sql);

			List<Map<String, Object>> list = dao.getListMap(sql, map, ll);
			for (Map<String, Object> row : list) {
				map2 = row;
				// Map.remove is a no-op for an absent key — the old try/catch
				// around it could never fire.
				map2.remove("timestamp");

				Message message = new Message();
				message.setMap(map2);
				message.setTarget(properties.get("targettable").toString());
				log.info("[message]{}", JSON.toJSONString(message));
				// TODO(review): dispatch is currently disabled; re-enable with
				// real error handling (not printStackTrace) when ready:
				// SendMessageUtils.sendMessage(properties.get("queuename").toString(), message);
			}
		}
	}
}
