package com.iflytek.jcy.supervise.job.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.iflytek.jcy.supervise.datastore.api.JdbcTemplateHolder;
import com.iflytek.jcy.supervise.job.common.enums.CluePushStatusEnum;
import com.iflytek.jcy.supervise.job.common.enums.TaskEnum;
import com.iflytek.jcy.supervise.job.component.httpinterface.ModelHttpInterface;
import com.iflytek.jcy.supervise.job.component.httpinterface.XxlJobHttpInterface;
import com.iflytek.jcy.supervise.job.component.httpinterface.modelResponse.*;
import com.iflytek.jcy.supervise.job.component.httpinterface.result.OutModelResponse;
import com.iflytek.jcy.supervise.job.component.httpinterface.result.ReturnT;
import com.iflytek.jcy.supervise.job.entity.Clue;
import com.iflytek.jcy.supervise.job.entity.Task;
import com.iflytek.jcy.supervise.job.entity.TaskDetail;
import com.iflytek.jcy.supervise.job.entity.TaskLog;
import com.iflytek.jcy.supervise.job.mapper.ClueMapper;
import com.iflytek.jcy.supervise.job.mapper.TaskDetailMapper;
import com.iflytek.jcy.supervise.job.mapper.TaskLogMapper;
import com.iflytek.jcy.supervise.job.mapper.TaskMapper;
import com.iflytek.jcy.supervise.job.service.IModelMiningService;
import com.xxl.job.core.context.XxlJobContext;
import com.xxl.job.core.context.XxlJobHelper;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;

import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

/** <br>
 * 标题:数据挖掘服务实现类 <br>
 * 描述: <br>
 * 公司: www.iflytek.com<br>
 * 
 * @author dgyu
 * @time 2022年10月31日 上午12:45:41 */
@Service
public class ModelMiningServiceImpl implements IModelMiningService {

	private static Logger				 logger	= LoggerFactory.getLogger(ModelMiningServiceImpl.class);
	@Autowired
	private JdbcTemplateHolder			 jdbcTemplateHolder;
	@Resource
	private ModelHttpInterface			 modelHttpInterface;
	@Resource
	private TaskMapper					 taskMapper;
	@Autowired
	private MongoTemplate				 mongoTemplate;
	@Autowired
	private TaskLogMapper				 taskLogMapper;
	@Autowired
	private TaskDetailMapper			 taskDetailMapper;
	@Autowired
	private ClueMapper					 clueMapper;
	@Autowired
	private XxlJobHttpInterface			 xxlJobHttpInterface;
	@Value("${spring.data.mongodb.collection}")
	private String						 collection;
	public static final SimpleDateFormat sdf	= new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
	public static final SimpleDateFormat sdf2	= new SimpleDateFormat("yyyyMMdd.HHmmss");
	@Override
	public void doDataMinging(JSONObject JSONObject) throws Exception {
		String param = JSON.toJSONString(JSONObject);
		this.doDataMinging(param, false);
	}

	private void doSleep(Boolean doSleep) throws Exception {
		if (!doSleep) {
			return;
		}
		for (int i = 0; i < 10; i++) {
			XxlJobHelper.log("demoJobHandler模拟任务运行中，开始睡眠30s i:{}", i);
			logger.info("demoJobHandler模拟任务运行中，开始睡眠30s i:{}", i);
			Thread.sleep(30 * 1000);
			XxlJobHelper.log("demoJobHandler模拟任务运行中，结束睡眠30s");
			logger.info("demoJobHandler模拟任务运行中，结束睡眠30s");
		}
	}

	@Override
	public void doDataMinging(String param, Boolean doSleep) throws Exception {
		Long jobId = XxlJobHelper.getJobId();
		logger.info("doDataMinging调度中心任务jobId:{}", jobId);
		XxlJobHelper.log("doDataMinging调度中心任务jobId:{}", jobId);
		Integer taskId = null;
		String startRunTime = null;
		Date taskStartTime = new Date();
		JSONObject jSONObject = null;
		try {
			jSONObject = JSONObject.parseObject(param);
			taskId = jSONObject.getInteger("taskId");
			startRunTime = jSONObject.getString("taskStartTime");
		}
		catch (Exception e1) {
			LambdaQueryWrapper<Task> query = new LambdaQueryWrapper<>();
			query.eq(Task::getXxlTaskId, jobId);
			List<Task> list = this.taskMapper.selectList(query);
			if (CollectionUtils.isEmpty(list)) {
				logger.info("doDataMinging调度中心任务不存在jobId:{}", jobId);
				XxlJobHelper.log("doDataMinging调度中心任务不存在jobId:{}", jobId);
				throw new Exception("调度中心任务不存在jobId:" + jobId);
			}
			taskId = list.get(0).getId();
		}
		String jobLogFileName = XxlJobHelper.getJobLogFileName();
		XxlJobHelper.log("#######################################JobLogFileName:{}", jobLogFileName);
		// String xxlJobLogId = getXxlJobLogId(jobLogFileName);
		Long xxlJobLogId = XxlJobHelper.getLogId();
		XxlJobHelper.log("#######################################xxlJobLogId:{}", xxlJobLogId);
		TaskLog taskLog = null;
		try {
			// 查看任务开始执行时间是否已经到达
			if (StringUtils.isNotEmpty(startRunTime)) {
				Date startRunTimeDate = sdf.parse(startRunTime);
				if (startRunTimeDate.getTime() > taskStartTime.getTime()) {
					logger.info("doDataMinging任务taskId:{}未达到任务开始执行时间，停止执行", taskId);
					XxlJobHelper.log("doDataMinging任务不存在taskId:{}未达到任务开始执行时间，停止执行", taskId);
					return;
				}
			}
			taskLog = this.addTaskLog(taskId, taskStartTime, String.valueOf(xxlJobLogId));
			doSleep(doSleep);
			final TaskLog taskLogFinal = taskLog;
			Task task = this.taskMapper.selectById(taskId);
			if (null == task) {
				logger.info("doDataMinging任务不存在taskId:{}", taskId);
				XxlJobHelper.log("doDataMinging任务不存在taskId:{}", taskId);
				throw new Exception("任务不存在taskId:" + taskId);
			}
			// int i = 1 / 0;
			Integer modelId = task.getModelId();
			Integer datasetId = task.getDatasetId();
			if (modelId == null || modelId <= 0 || datasetId == null || datasetId <= 0) {
				logger.info("doDataMinging任务模型or数据集未配置taskId:{}", taskId);
				XxlJobHelper.log("doDataMinging任务模型or数据集未配置taskId:{}", taskId);
				throw new Exception("任务模型or数据集未配置taskId:" + taskId);
			}
			// 获取模型下详情
			OutModelResponse<ModelInfo> out = this.modelHttpInterface.getModelFull(modelId);
			ModelInfo modelInfo = out.getData();
			if (null == modelInfo) {
				logger.info("doDataMinging模型不存在taskId:{},modelId:{}", taskId, modelId);
				XxlJobHelper.log("doDataMinging模型不存在taskId:{},modelId:{}", taskId, modelId);
				throw new Exception("模型不存在taskId:" + taskId + ",modelId:" + modelId);
			}
			List<Checkpointlist> checkpointlist = modelInfo.getCheckpointlist();
			if (CollectionUtils.isEmpty(checkpointlist)) {
				logger.info("doDataMinging模型下监督点不存在taskId:{},modelId:{}", taskId, modelId);
				XxlJobHelper.log("doDataMinging模型下监督点不存在taskId:{},modelId:{}", taskId, modelId);
				throw new Exception("监督点不存在，taskId:" + taskId + ",modelId:" + modelId);
			}
			// 增加线索记录
			Clue clue = this.addClue(task, 0, taskStartTime, taskLog);
			AtomicReference<Integer> totalClueNum = new AtomicReference<>(0);
			final Integer finalTaskId = taskId;
			// 遍历监督点 执行SQL
			checkpointlist.forEach(point -> {
				point.getRunnableDatasetList().forEach(runnableDataset -> {
					// 过滤当前任务选择的数据集
					if (runnableDataset.getId().equals(task.getDatasetId())) {
						String sql = runnableDataset.getSql();
						Integer clutNum = this.excuteSql(sql, task, point, taskStartTime, clue, taskLogFinal);
						totalClueNum.updateAndGet(v -> v + clutNum);
					}
					else {
						logger.info("doDataMinging当前任务taskId:{},datasetId:{} runnableModelId:{}不一致跳过", finalTaskId, task.getDatasetId(), runnableDataset.getId());
						XxlJobHelper.log("doDataMinging当前任务taskId:{},datasetId:{} runnableModelId:{}不一致跳过", finalTaskId, task.getDatasetId(), runnableDataset.getId());
					}
				});
			});
			Date endTime = new Date();
			LambdaUpdateWrapper<Task> lambdaUpdateWrapper1 = new LambdaUpdateWrapper<Task>().set(Task::getLastExcuteTime, taskStartTime)
			        .set(Task::getTaskStatus, TaskEnum.TASK_STATUS_RUNNING.getCode()).set(Task::getUpdateTime, endTime).eq(Task::getId, taskId);
			this.taskMapper.update(null, lambdaUpdateWrapper1);

			LambdaUpdateWrapper<Clue> lambdaUpdateWrapper2 = new LambdaUpdateWrapper<Clue>().set(Clue::getClueNum, totalClueNum.get()).set(Clue::getEndTime, endTime).set(Clue::getUpdateTime, endTime)
			        .eq(Clue::getId, clue.getId());
			this.clueMapper.update(null, lambdaUpdateWrapper2);

			LambdaUpdateWrapper<TaskLog> lambdaUpdateWrapper3 = new LambdaUpdateWrapper<TaskLog>().set(TaskLog::getStatus, TaskEnum.TASK_RUN_STATUS_RUNNING.getCode())
			        .set(TaskLog::getExceptionNum, totalClueNum.get()).set(TaskLog::getTaskRunEndTime, endTime).eq(TaskLog::getId, taskLog.getId());
			this.taskLogMapper.update(null, lambdaUpdateWrapper3);

			ReturnT<String> returnT = this.xxlJobHttpInterface.nextTriggerTimeById(task.getXxlTaskId());
			if (returnT.getCode() == 200) {
				String content = returnT.getContent();
				// 单次任务下次执行时间时空值
				if (StringUtils.isNotEmpty(content)) {
					logger.info("doDataMinging任务taskId:{}获取到下次执行时间是NextExcuteTime:{}", taskId, content);
					XxlJobHelper.log("doDataMinging任务taskId:{}获取到下次执行时间是NextExcuteTime:{}", taskId, content);
					LambdaUpdateWrapper<Task> lambdaUpdateWrapper4 = new LambdaUpdateWrapper<Task>().set(Task::getNextExcuteTime, content).set(Task::getUpdateTime, taskStartTime).eq(Task::getId,
					        taskId);
					this.taskMapper.update(null, lambdaUpdateWrapper4);
				}
				else {// 单次任务运行状态未运行完毕
					LambdaUpdateWrapper<Task> lambdaUpdateWrapper4 = new LambdaUpdateWrapper<Task>().set(Task::getTaskStatus, TaskEnum.TASK_STATUS_OVER.getCode())
					        .set(Task::getUpdateTime, taskStartTime).eq(Task::getId, taskId);
					this.taskMapper.update(null, lambdaUpdateWrapper4);
				}

			}
			logger.info("modelDataMiningJobHandler任务:{}执行结束{},共计发现线索:{}条", taskId, sdf.format(new Date()).replace("T", ""), totalClueNum);
			XxlJobHelper.log("modelDataMiningJobHandler任务:{}执行结束{},共计发现线索:{}条", taskId, sdf.format(new Date()).replace("T", ""), totalClueNum);

			// 更新日志状态为执行成功
			LambdaUpdateWrapper<TaskLog> lambdaUpdateWrapper33 = new LambdaUpdateWrapper<TaskLog>().set(TaskLog::getStatus, TaskEnum.TASK_RUN_STATUS_SUCCESS.getCode())
			        .set(TaskLog::getExceptionNum, totalClueNum.get()).set(TaskLog::getTaskRunEndTime, new Date()).eq(TaskLog::getId, taskLog.getId());
			this.taskLogMapper.update(null, lambdaUpdateWrapper33);
		}
		catch (Exception e) {
			logger.error("执行异常，异常信息:{}", e.getMessage());
			XxlJobHelper.log("执行异常，异常信息:{},参数:{}", e.getMessage(), param);
			XxlJobHelper.handleResult(XxlJobContext.HANDLE_CODE_FAIL, e.getMessage());
			// 更新任务状态运行结束
			LambdaUpdateWrapper<TaskLog> lambdaUpdateWrapper3 = new LambdaUpdateWrapper<TaskLog>().set(TaskLog::getStatus, TaskEnum.TASK_RUN_STATUS_EXCEPTION.getCode())
			        .set(TaskLog::getExceptionNum, 0).set(TaskLog::getTaskRunEndTime, new Date()).eq(TaskLog::getId, taskLog.getId());
			this.taskLogMapper.update(null, lambdaUpdateWrapper3);

			LambdaUpdateWrapper<Task> lambdaUpdateWrapper1 = new LambdaUpdateWrapper<Task>().set(Task::getLastExcuteTime, taskStartTime)
			        .set(Task::getTaskStatus, TaskEnum.TASK_STATUS_EXCEPTION.getCode()).set(Task::getUpdateTime, new Date()).eq(Task::getId, taskId);
			this.taskMapper.update(null, lambdaUpdateWrapper1);
		}
	}

	private Integer excuteSql(String sql, Task task, Checkpointlist checkpointlist, Date taskStartTime, Clue clue, TaskLog taskLog) {
		// sql = "SELECT \"name\" AS B00001, \"gender\" AS B00002, \"birthday\" AS B00003 FROM v_56 WHERE \"type\" = 2 AND \"nation\" = '汉族' AND \"gender\" =
		// '男' limit 11";
		logger.info("执行sql taskId:{}, sql:{}", task.getId(), sql);
		XxlJobHelper.log("执行sql taskId:{} ,sql:{}", task.getId(), sql);

		Date startTime = new Date();
		logger.info("modelDataMiningJobHandler start:{}", sdf.format(startTime).replace("T", ""));
		XxlJobHelper.log("modelDataMiningJobHandler start:{}", sdf.format(startTime).replace("T", ""));

		JdbcTemplate jdbcTemplate = jdbcTemplateHolder.getNamedParameterJdbcTemplate().getJdbcTemplate();
		// 查询数据仓库得到数据结果
		List<Map<String, Object>> list = jdbcTemplate.queryForList(sql);
		if (CollectionUtils.isEmpty(list)) {
			logger.info("执行sql 结果为空taskId:{}, sql:{}", task.getId(), sql);
			XxlJobHelper.log("执行sql taskId:{} ,start:{}", task.getId(), sql);
			return 0;
		}
		String result = JSON.toJSONString(list);
		logger.info("modelDataMiningJobHandler执行结果:{}", result);
		XxlJobHelper.log("modelDataMiningJobHandler执行结果:{}", result);
		int clueNum = this.installPointData(list, checkpointlist, task, clue, taskLog);
		return clueNum;
	}

	private TaskLog addTaskLog(Integer taskId, Date taskStartTime, String xxlLogId) {
		TaskLog taskLog = new TaskLog();
		taskLog.setTaskId(taskId);
		taskLog.setTaskRunStartTime(taskStartTime);
		taskLog.setBehavior(TaskEnum.BEHAVIOR_EXCUTE.getCode());
		taskLog.setStatus(TaskEnum.TASK_RUN_STATUS_RUNNING.getCode());
		taskLog.setXxlLogId(xxlLogId);
		taskLogMapper.insert(taskLog);
		return taskLog;
	}

	private TaskDetail addTaskDetail(Integer taskId, Integer taskLogId, Integer supervisionPointId, String supervisionPointName, String exceptionLogic, String ruleComment, String pointContent,
	        String supervisionObject, String supervisionObjectName, String supervisionDesc, Integer clueId, Date createTime) {
		TaskDetail taskDetail = new TaskDetail();
		taskDetail.setTaskId(taskId);
		taskDetail.setTaskLogId(taskLogId);
		taskDetail.setSupervisionPointId(supervisionPointId);
		taskDetail.setSupervisionPointName(supervisionPointName);
		taskDetail.setExceptionLogic(exceptionLogic);
		taskDetail.setExceptionTypeDesc(ruleComment);
		taskDetail.setSupervisionPointContent(pointContent);
		taskDetail.setSupervisionObject(supervisionObject);
		taskDetail.setSupervisionObjectName(supervisionObjectName);
		taskDetail.setSupervisionDesc(supervisionDesc);
		taskDetail.setCreateTime(createTime);
		taskDetail.setClueId(clueId);
		this.taskDetailMapper.insert(taskDetail);
		return taskDetail;
	}

	private Clue addClue(Task task, Integer clueNum, Date startTime, TaskLog taskLog) {
		Clue clue = new Clue();
		clue.setModelId(task.getModelId());
		clue.setTaskId(task.getId());
		clue.setTaskName(task.getTaskName());
		clue.setTaskName(task.getTaskName());
		clue.setClueName(task.getTaskName() + sdf2.format(startTime));
		clue.setStartTime(startTime);
		clue.setEndTime(null);
		clue.setCluePushStatus(CluePushStatusEnum.UNPUSHED.getCode());
		clue.setCreateTime(new Date());
		clue.setDatasetId(task.getDatasetId());
		clue.setModelName(task.getModelName());
		clue.setDatasetName(task.getDatasetName());
		clue.setTaskLogId(taskLog.getId());
		this.clueMapper.insert(clue);
		return clue;
	}

	/** 组装监督点报告字段和SQL查询结果
	 * 
	 * @param listMap
	 * @param checkpoint
	 *        监督点 */
	private int installPointData(List<Map<String, Object>> listMap, Checkpointlist checkpoint, Task task, Clue clue, TaskLog taskLog) {
		// 报告字段
		List<Reportfieldlist> reportfieldlist = checkpoint.getReportfieldlist();
		if (CollectionUtils.isEmpty(reportfieldlist)) {
			return 0;
		}
		// 报告字段英文名
		List<String> numbers = reportfieldlist.stream().map(Reportfieldlist::getNumber).collect(Collectors.toList());
		// 报告字段中文名
		List<String> names = reportfieldlist.stream().map(Reportfieldlist::getName).collect(Collectors.toList());
		List<Map<String, Object>> mongoDataMap = new ArrayList<>();
		Integer clueId = clue.getId();
		for (Map<String, Object> map : listMap) {
			Map<String, Object> mongoMap = new HashMap<>();
			Objectfield objectfield = checkpoint.getObjectfield();
			String reportObjectField = "";
			//String reportObjectFieldName = "";
			if (objectfield == null) {
				continue;
			}
			reportObjectField = objectfield.getNumber();
			// 报告字段不包括监督对象字段直接忽略
			if (!numbers.contains(reportObjectField)) {
				continue;
			}
			//reportObjectFieldName = objectfield.getName();
			AtomicReference<String> value = new AtomicReference<>("");
			List<Object> values = new ArrayList<>();
			// 监督对象值
			AtomicReference<String> jddxz = new AtomicReference<>("");
			String finalReportObjectField = reportObjectField;
			//String finalReportObjectFieldName = reportObjectFieldName;
			//List<String> numbers2 = numbers.stream().filter(v -> !v.equalsIgnoreCase(finalReportObjectField)).collect(Collectors.toList());
			//List<String> names2 = names.stream().filter(v -> !v.equalsIgnoreCase(finalReportObjectFieldName)).collect(Collectors.toList());
			reportfieldlist.forEach(field -> {
				String key = field.getNumber();
				if (map.containsKey(key)) {
					Object v = map.get(key);
					String finalvalue = String.valueOf(v == null ? "" : v);
					value.set(finalvalue);
					// 获取报告对象的值
					if (finalReportObjectField.equalsIgnoreCase(key)) {
						jddxz.set(finalvalue);
					}
				}
				values.add(value.get());
			});
			// 字段英文名 过滤掉报告字段
			mongoMap.put("numbers", numbers);
			// 字段中文名 过滤掉报告字段
			mongoMap.put("names", names);
			// 报告对象字段中文名
			mongoMap.put("reportObject", objectfield == null ? "" : objectfield.getName());
			mongoMap.put("reportObjectName", jddxz.get());
			mongoMap.put("taskId", task.getId());
			mongoMap.put("taskLogId", taskLog.getId());
			mongoMap.put("pointId", checkpoint.getId());
			mongoMap.put("clueId", clue.getId());
			mongoMap.put("createTime", sdf.format(new Date()));
			if (mongoMap.keySet().size() > 0) {
				// 组装taskDetail
				TaskDetail taskDetail = addTaskDetail(task.getId(), taskLog.getId(), checkpoint.getId(), checkpoint.getName(), checkpoint.getRule().getNumber(), checkpoint.getRule().getComment(),
				        checkpoint.getRule().getName(), objectfield == null ? "" : objectfield.getName(), jddxz.get(), checkpoint.getRule().getComment(), clueId, new Date());
				mongoMap.put("taskDetailId", taskDetail.getId());
				mongoMap.put("_id", taskDetail.getId());
				// 字段值
				mongoMap.put("values", values);
				mongoDataMap.add(mongoMap);
			}
		}
		if (CollectionUtils.isNotEmpty(mongoDataMap)) {
			// todo 插入mogo
			XxlJobHelper.log("modelDataMiningJobHandler  installPointData end 任务taskId:{}共计入MongoDB:{}条，数据是:{}", task.getId(), CollectionUtils.size(mongoDataMap), JSON.toJSONString(mongoDataMap));
			logger.info(JSON.toJSONString(mongoDataMap));
			mongoTemplate.insert(mongoDataMap, collection);
		}
		return CollectionUtils.size(mongoDataMap);
	}
}
