package com.ruoyi.xdata.task;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.sql.DataSource;

import org.apache.commons.collections4.map.HashedMap;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.ruoyi.util.DbUtil;
import com.ruoyi.xdata.domain.XdataLog;
import com.ruoyi.xdata.domain.XdataModel;
import com.ruoyi.xdata.domain.XdataPipe;
import com.ruoyi.xdata.mapper.XdataClientMapper;
import com.ruoyi.xdata.mapper.XdataLogMapper;
import com.ruoyi.xdata.mapper.XdataModelMapper;
import com.ruoyi.xdata.mapper.XdataPipeMapper;

import lombok.extern.slf4j.Slf4j;

/**
 * 数据同步的定时任务
 * @author Administrator
 *
 */
@Slf4j
@Component("xdata")
public class XDataJob {
	@Autowired	 
    private DataSource dataSource; 
	@Autowired	 
	private XdataPipeMapper xdataPipeMapper;
	@Autowired	 
	private XdataLogMapper xdataLogMapper;
	@Autowired
	private XdataModelMapper xdataModelMapper;
	@Autowired	 
	private XdataClientMapper xdataClientMapper;

	/**
	 * 默认一次读1000条数据
	 */
	public static int ROWS = 1000;

	private boolean runing = false;
	
	/**
	 * 定时任务入口
	 */
	synchronized public void sync() {
		if(runing) {
			log.info("正在执行中，本次任务将忽略...");
			return;
		}
		this.runing = true;

		//各数据模型，按优先级进行同步
		XdataModel mc = new XdataModel();
		mc.setStatus("1");
		List<XdataModel> modelList = xdataModelMapper.selectXdataModelList(mc);
		modelList.sort(Comparator.comparingLong(XdataModel::getSorts));
		for(XdataModel model:modelList){
			XdataPipe pc = new XdataPipe();
			pc.setStatus("1");
			pc.setModelId(model.getId());
			List<XdataPipe> list = xdataPipeMapper.selectXdataPipeList(pc);
			if(list == null || list.isEmpty()){
				log.info("数据模型{}没有配置数据通道", model.getTitle());
				continue;
			}

			//各通道按优先级执行
			list.sort(Comparator.comparingLong(XdataPipe::getSorts));
			for(XdataPipe p : list) {
				int count = trans(p);

				if(count > 0) {
					p.setLastResult(0l + count);
					p.setLastTime(new Date());
					xdataPipeMapper.updateXdataPipe(p);
				}
			}
		}


		this.runing = false;
	}

	/**
	 * 自定义任务入口
	 */
	public void job(String ids) {
		String[] id = ids.split("-");
		for(String pipeId:id) {
			XdataPipe pipe = xdataPipeMapper.selectXdataPipeById(Long.valueOf(pipeId));
			if(pipe != null) {
				trans(pipe);
			}else {
				log.error("管道{}不存在", id);
			}
		}
		
	}
	
	/**
	 * 自定义任务入口
	 */
	public void run(Integer id) {
		XdataPipe pipe = xdataPipeMapper.selectXdataPipeById(0l + id);
		if(pipe != null) {
			trans(pipe);
		}else {
			log.error("管道{}不存在", id);
		}
	}
	
	/**
	 * 同步指定管道的数据
	 * @param pipe 管道
	 * @return
	 */
	public int trans(XdataPipe pipe) {
		log.info("执行通道 {}", pipe.getTitle());
		Connection con1 = DbUtil.getConnection(xdataClientMapper.selectXdataClientById(pipe.getClientId()));
		if(con1 == null) {
			log.error("连接目标数据库{}失败!", pipe.getTitle());
			return -1;
		}
		
		XdataLog log = new XdataLog();
		log.setPipeId(pipe.getId());
		log.setStartTime(new Date());
		int count = 0;
		Connection con2 = null;
		try {			
			con2 = dataSource.getConnection();
			DbUtil db1 = new DbUtil(con1);
			DbUtil db2 = new DbUtil(con2);
			con1.setAutoCommit(false);
			con2.setAutoCommit(false);
			if("r".equalsIgnoreCase(pipe.getMode())) {
				count = trans(pipe, db2, db1);
			}else {
				count =trans(pipe, db1, db2);
			}
			con1.commit();
			con2.commit();
			db1.stop();
			db2.stop();
			con1.close();
			con1 = null;
			con2.close();
			con2 = null;
			
			log.setResult("1");
			log.setRows(count + 0l);
		}catch (SQLException e) {
			log.setResult("0");
			log.setRows(0l);
			log.setError(e.getMessage());
			e.printStackTrace();
		}finally {
			try {
				if(con1 != null) {
					con1.close();
					con1 = null;
				}
				if(con2 != null) {
					con2.close();
					con2 = null;
				}
			}catch (Exception e) {
				log.setError(log.getError()+e.getMessage());
			}
			log.setEndTime(new Date());
			xdataLogMapper.insertXdataLog(log);			
		}
		
		return count;
	}
	
	/**
	 * 指行一次数据同步
	 * @param pipe 管道
	 * @param read 源数据
	 * @param write 目标数据
	 * @return
	 * @throws SQLException
	 */
	public int trans(XdataPipe pipe, DbUtil read, DbUtil write) throws SQLException{
		int page = 0;
		int rows = 1000;
		Map<String, Object> params = new HashedMap<>();
		params.put("limit",ROWS);
		Date dt = pipe.getLastTime();
		if(dt == null) {
			dt = new Date(1l);
		}
		params.put("last", dt);
		int count = 0;
		while(true) {
			params.put("offset", page * ROWS);
			String sql = pipe.getReadSql();
			List<Map> list = read.query(sql, params);
			if(list == null || list.size() == 0) {
				break;
			}
			for(Map data : list) {
				if(write.find(pipe.getExistSql(), data) == null) {
					write.exec(pipe.getInsertSql(), data);
				}else {
					write.exec(pipe.getUpdateSql(), data);
				}
				
				count ++;
			}		
			
			if(pipe.getReadSql().indexOf("#{_rows}") > 0 && list.size() == rows) {//有分页且读满所有行，说明可能还有数据
				page ++;
			}else {//不分页直接退出
				break;
			}
			
		}
		return count;
	}
}
