package com.dhcc.cdc.sys.service.impl;

import com.alibaba.fastjson.JSONArray;
import com.baomidou.mybatisplus.mapper.EntityWrapper;
import com.baomidou.mybatisplus.mapper.Wrapper;
import com.baomidou.mybatisplus.plugins.Page;
import com.dhcc.cdc.sys.cache.config.CdcConfigCache;
import com.dhcc.cdc.sys.dao.KettleJobLogMapper;
import com.dhcc.cdc.sys.entity.KettleJobLogEntity;
import com.dhcc.cdc.sys.service.IKettleJobLogService;
import com.dhcc.cdc.util.KettleUtil;
import com.dhcc.core.framework.base.service.impl.BaseServiceImpl;
import com.dhcc.core.framework.util.CommonUtil;
import com.dhcc.core.framework.util.DateUtil;
import com.dhcc.core.framework.util.codec.AesUtil;
import com.dhcc.core.modules.system.cache.config.ConfigCache;
import com.dhcc.core.modules.system.dao.JobLogMapper;
import com.dhcc.core.modules.system.entity.JobLog;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Service implementation for kettle job logs: paged/list queries, a daily
 * summary of failed scheduled tasks, and manual triggering of the SDS
 * data-synchronization kettle job.
 *
 * @ClassName: KettleJobLogServiceImpl
 * @author WuJiaFeng
 * @date 2020-06-24 10:15:54
 */
@Service("kettleJobLogService")
public class KettleJobLogServiceImpl extends BaseServiceImpl<KettleJobLogMapper, KettleJobLogEntity> implements IKettleJobLogService {

	// NOTE(review): stdlib logger used so failures are no longer silent; switch
	// to the project's standard logging facade if one exists.
	private static final Logger LOGGER = Logger.getLogger(KettleJobLogServiceImpl.class.getName());

	@Autowired
	JobLogMapper jobLogMapper;

	/**
	 * Paged query of kettle job logs, delegating to the mapper's custom SQL.
	 *
	 * @param page MyBatis-Plus pagination object
	 * @param map  query condition parameters passed through to the mapper
	 * @return the records for the requested page
	 */
	@Override
	public List<KettleJobLogEntity> page(Page<KettleJobLogEntity> page, Map<String, Object> map) {
		return this.baseMapper.page(page, map);
	}

	/**
	 * Unpaged query of kettle job logs, delegating to the mapper's custom SQL.
	 *
	 * @param map query condition parameters passed through to the mapper
	 * @return all matching records
	 */
	@Override
	public List<KettleJobLogEntity> list(Map<String, Object> map) {
		return this.baseMapper.list(map);
	}

	/**
	 * Collects failure messages for today's scheduled tasks: the kettle data
	 * extraction job (errors = 1) and the single-disease business job
	 * (status = 1).
	 *
	 * @return a JSONArray containing one message per failed task category;
	 *         empty when nothing failed today
	 * @see com.dhcc.cdc.sys.service.IKettleJobLogService#taskErrorLog()
	 */
	@Override
	public JSONArray taskErrorLog() {
		JSONArray teLogArray = new JSONArray();

		// Kettle extraction jobs that errored today.
		Wrapper<KettleJobLogEntity> wrapper = new EntityWrapper<KettleJobLogEntity>();
		wrapper.where("date_format(logdate,'%Y-%m-%d')={0}", DateUtil.getDay()).where("errors={0}", 1);
		// FIX: selectOne can throw TooManyResultsException when several rows
		// match; only existence matters here, so query a list instead
		// (mirrors the JobLog check below).
		List<KettleJobLogEntity> kettleJobList = this.selectList(wrapper);
		if (CommonUtil.isNotEmpty(kettleJobList)) {
			teLogArray.add("抽取数据定时任务失败;");
		}

		// Single-disease business jobs that failed today.
		Wrapper<JobLog> wpJob = new EntityWrapper<JobLog>();
		wpJob.where("date_format(create_date,'%Y-%m-%d')={0}", DateUtil.getDay()).where("status={0}", 1);
		List<JobLog> jobLogList = jobLogMapper.selectList(wpJob);
		if (CommonUtil.isNotEmpty(jobLogList)) {
			teLogArray.add("单病种业务定时任务失败;");
		}
		return teLogArray;
	}

	/**
	 * Runs the SDS kettle synchronization job ("sds.kjb") with database
	 * connection settings read from the "SDS" configuration group.
	 *
	 * @param params optional keys: startDate, endDate, episodeId,
	 *               isSynBase (whether base data is synchronized; defaults to "0")
	 * @return the result of {@code KettleUtil.runSDSJob}, or {@code false}
	 *         when the job throws a KettleException
	 */
	@Override
	public Object syndata(Map<String, Object> params) {
		// Extraction window and optional single-episode filter.
		String startDate = asString(params.get("startDate"), null);
		String endDate = asString(params.get("endDate"), null);
		String episodeId = asString(params.get("episodeId"), null);
		// Whether base data should also be synchronized; defaults to "0".
		String isSynBase = asString(params.get("isSynBase"), "0");

		// FIX: was a raw HashMap; parameterize to avoid unchecked operations.
		Map<String, Object> variables = new HashMap<String, Object>();
		// Only an IRIS HIS database needs explicit JDBC driver/url overrides.
		// FIX: constant-first equals is safe even if the cache returned null.
		if ("IRIS".equals(CdcConfigCache.me().getValueByKey("SDS", "his_dbtype", ""))) {
			variables.put("his_driver", CdcConfigCache.me().getValueByKey("SDS", "his_driver"));
			variables.put("his_url", CdcConfigCache.me().getValueByKey("SDS", "his_url"));
		}
		// HIS (source) connection settings; the password is stored AES-encrypted.
		variables.put("his_ip", CdcConfigCache.me().getValueByKey("SDS", "his_ip"));
		variables.put("his_ns", CdcConfigCache.me().getValueByKey("SDS", "his_ns"));
		variables.put("his_p", CdcConfigCache.me().getValueByKey("SDS", "his_p"));
		variables.put("his_u", CdcConfigCache.me().getValueByKey("SDS", "his_u"));
		variables.put("his_pw", AesUtil.decode(CdcConfigCache.me().getValueByKey("SDS", "his_pw")));
		// SDS (target) connection settings; password likewise AES-encrypted.
		variables.put("sds_ip", CdcConfigCache.me().getValueByKey("SDS", "sds_ip"));
		variables.put("sds_p", CdcConfigCache.me().getValueByKey("SDS", "sds_p"));
		variables.put("sds_s", CdcConfigCache.me().getValueByKey("SDS", "sds_s"));
		variables.put("sds_u", CdcConfigCache.me().getValueByKey("SDS", "sds_u"));
		variables.put("sds_pw", AesUtil.decode(CdcConfigCache.me().getValueByKey("SDS", "sds_pw")));

		try {
			return KettleUtil.runSDSJob("sds.kjb", variables, startDate, endDate, episodeId, isSynBase, false);
		} catch (KettleException e) {
			// FIX: exception was silently swallowed; log the cause while
			// preserving the original "return false on failure" contract.
			LOGGER.log(Level.SEVERE, "SDS synchronization job sds.kjb failed", e);
			return false;
		}
	}

	/**
	 * Null-safe conversion of a request parameter to a String.
	 *
	 * @param value        raw parameter value, may be null
	 * @param defaultValue value returned when {@code value} is null
	 * @return {@code value.toString()} or {@code defaultValue}
	 */
	private static String asString(Object value, String defaultValue) {
		return value == null ? defaultValue : value.toString();
	}
}