package cn.piesat.scanning.business.common.algorithm.processAlgorithm;

import cn.piesat.scanning.dto.*;
import com.alibaba.fastjson.JSON;
import cn.piesat.scanning.service.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;

import java.util.HashMap;
import java.util.Map;

/**
 * @author xhl
 */
public abstract class SingleTaskAbstract {

    @Autowired
    private HtScheduleJobService htScheduleJobService;
    @Autowired
    private HtDynamicParameterService htDynamicParameterService;
    @Autowired
    private HtFixedParameterService htFixedParameterService;
    @Autowired
    private DbDmsSchedulerTaskFileRecordService dbDmsSchedulerTaskFileRecordService;
    @Autowired
    private DbDmsSchedulerTaskRecordLogService dbDmsSchedulerTaskRecordLogService;
    @Autowired
    private DbDmsSchedulerTaskFileRecordFlowService dbDmsSchedulerTaskFileRecordFlowService;
    @Autowired
    private HtAlgorithmConfigurationService htAlgorithmConfigurationService;
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;
    @Autowired
    private HtScanningMetadataService htScanningMetadataService;

    /** Default Kafka topic for DMS task-execution messages. */
    @Value("${spring.kafka.producer.dmstaskexec.topic}")
    private String execTopic;

    /** Kafka topic for the Hebei (HB) algorithm family. */
    @Value("${spring.kafka.producer.hb_dms.topic}")
    private String hbTopic;

    /** Kafka topic for the adslstfil algorithm family. */
    @Value("${spring.kafka.producer.adslstfil.topic}")
    private String adslstfilTopic;

    /**
     * Plugin hook: parse scan rules and locate matching data, using index parameters.
     * Implementations are expected to assemble the payload and trigger
     * {@link #updateRecordAndSendMessage} when a matching file is found.
     *
     * @param inputParams dynamic (input) parameters of the task
     * @param ruleParams  fixed (rule) parameters of the task
     * @param indexParams index parameters of the task
     * @param taskDTO     the scheduled task being processed
     */
    public abstract void parseRuleAndFindData(Map<String, Object> inputParams, Map<String, Object> ruleParams,
                                              Map<String, Integer> indexParams, HtScheduleJobDTO taskDTO);

    /**
     * Plugin hook: parse scan rules and locate matching data (no index parameters).
     *
     * @param inputParams dynamic (input) parameters of the task
     * @param ruleParams  fixed (rule) parameters of the task
     * @param taskDTO     the scheduled task being processed
     */
    public abstract void parseRuleAndFindData(Map<String, Object> inputParams, Map<String, Object> ruleParams,
                                              HtScheduleJobDTO taskDTO);

    /**
     * Plugin hook: parse scan rules against an already-resolved data object,
     * used by {@link #redo(String)} to replay a previously scanned file.
     *
     * @param inputParams dynamic (input) parameters of the task
     * @param ruleParams  fixed (rule) parameters of the task
     * @param taskDTO     the scheduled task being processed
     * @param data        the data to re-process (a scanning-metadata DTO in the redo path)
     */
    public abstract void parseRuleAndFindData(Map<String, Object> inputParams, Map<String, Object> ruleParams,
                                              HtScheduleJobDTO taskDTO, Object data);

    /**
     * Single-task plugin entry point. Loads the task plus its algorithm parameters
     * (fixed, dynamic/input, and index parameters), then delegates to the plugin's
     * {@code parseRuleAndFindData} implementation, which scans files, assembles the
     * payload, and sends a Kafka message once a matching data set is found.
     *
     * @param taskId id of the scheduled task to run
     */
    public void process(String taskId) {
        HtScheduleJobDTO taskDTO = htScheduleJobService.findById(taskId);
        // Load algorithm parameter sets: fixed, dynamic (input), and index parameters.
        Map<String, Object> ruleParams = htFixedParameterService.findByFixedtaskId(taskId);
        Map<String, Object> inputParams = htDynamicParameterService.findByDynamictaskId(taskId);
        Map<String, Integer> indexParams = htFixedParameterService.findByFixedIndextaskId(taskId);
        // Scan files, assemble data, and send a Kafka message for the first match.
        parseRuleAndFindData(inputParams, ruleParams, indexParams, taskDTO);
    }

    /**
     * Updates (or creates) the file-record row for this task/file pair and publishes
     * the assembled execution payload to the Kafka topic associated with the task's
     * algorithm family. Must be called by plugins after each scan once the payload
     * has been assembled. On a re-run of an existing record, the record is reset and
     * its log/flow child rows are purged first.
     *
     * @param execParams          extra parameters; {@code taskPlanId} and {@code productType} are read from it
     * @param taskDTO             the task being executed
     * @param fileName            name of the main (primary) file
     * @param fileDate            observation/file date carried into the message payload
     * @param algorithmExecParams algorithm execution parameters forwarded verbatim in the payload
     */
    @Transactional(rollbackFor = Exception.class)
    public void updateRecordAndSendMessage(Map<String, Object> execParams, HtScheduleJobDTO taskDTO, String fileName, String fileDate,
                                           Map<String, Object> algorithmExecParams) {
        String taskPlanId = (String) execParams.get("taskPlanId");
        // Create the file record on first sight; on redo, reset it and purge child log/flow rows.
        DbDmsSchedulerTaskFileRecordDTO record = dbDmsSchedulerTaskFileRecordService.findByTaskIdAndFileName(taskDTO.getId(), fileName);
        if (record == null) {
            record = dbDmsSchedulerTaskFileRecordService.prepareDataByFileName(taskDTO.getId(), fileName, taskPlanId);
        } else {
            dbDmsSchedulerTaskFileRecordService.dataRedo(record);
            dbDmsSchedulerTaskRecordLogService.deleteByRid(record.getId());
            dbDmsSchedulerTaskFileRecordFlowService.deleteByRid(record.getId());
        }
        // Assemble the message payload: algorithm params, basic envelope fields, and pass-through extras.
        HtAlgorithmConfigurationDTO algorithmDTO = htAlgorithmConfigurationService.findById(taskDTO.getAlgoId());
        Map<String, Object> resultMap = new HashMap<>();
        resultMap.put("algorithmParameter", algorithmExecParams);
        Map<String, Object> basic = new HashMap<>();
        basic.put("algorithmIdentify", algorithmDTO.getAlgorithmIdentify());
        basic.put("hasFlow", false);
        basic.put("taskId", taskDTO.getId());
        basic.put("fileTaskID", record.getId());
        basic.put("mainFileName", fileName);
        basic.put("fileDate", fileDate);
        // NOTE(review): StringUtils.isEmpty(Object) is deprecated in newer Spring; kept for identical semantics.
        basic.put("productType", StringUtils.isEmpty(execParams.get("productType")) ? "" : execParams.get("productType"));
        resultMap.put("basic", basic);
        resultMap.put("other", execParams);
        // Serialize once; route to the topic owned by this algorithm family.
        String payload = JSON.toJSONString(resultMap);
        switch (algorithmDTO.getAlgorithmIdentify()) {
            case "Tibet_Grass_PJP":
            case "ImageRectify":
            case "GF_L1":
            case "XIZANG_FOR_NPP":
            case "XIZANG_FOR_NEP":
            case "XIZANG_FOR_GPP":
            case "XIZANG_FOR_AGB":
            case "HB_H9_AHI_L1_2000M":
                kafkaTemplate.send(adslstfilTopic, payload);
                break;
            case "HB_SPI_SPEI":
            case "HB_CLM":
            case "HB_LST":
            case "HEBEI_VEG":
            case "HEBEI_PDI":
            case "FY4A_YMWL":
                kafkaTemplate.send(hbTopic, payload);
                break;
            default:
                kafkaTemplate.send(execTopic, payload);
                break;
        }
    }

    /**
     * Redo entry point: re-runs the parse for a previously processed file record.
     * Silently returns when the record is missing, still pending/running, or when
     * the original scan metadata no longer exists.
     *
     * @param recordId id of the file record to replay
     */
    public void redo(String recordId) {
        DbDmsSchedulerTaskFileRecordDTO record = dbDmsSchedulerTaskFileRecordService.findById(recordId);
        if (record == null) {
            return;
        }
        // Pending (0) and running (1) records cannot be redone; states >= 2 are terminal (success/failure/errors).
        if (record.getState() < 2) {
            return;
        }
        String fileName = record.getFileName();
        HtScanningMetadataDTO metadataDTO = htScanningMetadataService.findByFileName(fileName);
        // Source metadata no longer exists — nothing to replay.
        if (metadataDTO == null) {
            return;
        }
        Map<String, Object> ruleParams = htFixedParameterService.findByFixedtaskId(record.getTaskId());
        Map<String, Object> inputParams = htDynamicParameterService.findByDynamictaskId(record.getTaskId());
        HtScheduleJobDTO taskDTO = htScheduleJobService.findById(record.getTaskId());
        parseRuleAndFindData(inputParams, ruleParams, taskDTO, metadataDTO);
    }
}
