package com.huatai.bi.kafaka;

import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON;
import com.huatai.bi.constant.KafkaConstant;
import com.huatai.bi.dto.kafka.DataSetDTO;
import com.huatai.bi.kafaka.DTO.DataSetKafkaDTO;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

/**
 * Kafka listeners that trigger data-set synchronization.
 *
 * <p>One listener handles SQL/Python data-set sync messages; the other handles
 * data-processing data-set messages and triggers a full-version update of the
 * processed data set.
 *
 * @author hecaiy
 * @since 2024-07-03
 */
@Slf4j
@Component
public class DataSetKafkaListener {

	// NOTE(review): field injection kept for compatibility; constructor
	// injection would be preferable for testability.
	@Autowired
	SyncDataSetService syncDataSetService;

	/**
	 * Consumes SQL/Python data-set sync messages and triggers synchronization
	 * for the referenced data set.
	 *
	 * @param record the raw Kafka record; its value is expected to be a JSON
	 *               string deserializable into {@link DataSetKafkaDTO}
	 */
	// NOTE(review): groupId reuses the topic constant — confirm this is intended.
	@KafkaListener(groupId = KafkaConstant.DATA_SET_SYN_TOPIC, topics = KafkaConstant.DATA_SET_SYN_TOPIC)
	public void dataSetSqlListener(ConsumerRecord<?, ?> record) {
		String message = (String) record.value();
		log.info("bi_data_set消费组收到消息：{}", message);
		DataSetKafkaDTO directoryParam = JSON.parseObject(message, DataSetKafkaDTO.class);
		// parseObject returns null for a null/empty payload — guard before dereferencing
		if (directoryParam != null && directoryParam.getDtId() != null) {
			syncDataSetService.synDataSet(directoryParam.getDtId(), true, null);
		}
		log.info("消息处理结束");
	}

	/**
	 * Consumes data-processing data-set messages and triggers the processing
	 * sync. A message of type {@code "edit"} additionally requests table
	 * (re)creation.
	 *
	 * @param record the raw Kafka record; its value is expected to be a JSON
	 *               string deserializable into {@link DataSetDTO}
	 */
	@KafkaListener(groupId = KafkaConstant.KAFKA_TOPIC_DATA_SET_PROCESS, topics = KafkaConstant.KAFKA_TOPIC_DATA_SET_PROCESS)
	public void processListener(ConsumerRecord<?, ?> record) {
		String message = (String) record.value();
		log.info("process消费组收到消息：{}", message);
		DataSetDTO dataSetDTO = JSON.parseObject(message, DataSetDTO.class);
		if (ObjectUtil.isNotEmpty(dataSetDTO) && ObjectUtil.isNotEmpty(dataSetDTO.getDtId())) {
			// "edit" messages require the target table to be (re)created
			boolean createTableFlag = "edit".equals(dataSetDTO.getType());
			syncDataSetService.dataSetProcess(dataSetDTO.getDtId(), createTableFlag);
		}
		// dataSetDTO may be null when the payload is empty/unparseable —
		// original code dereferenced it unconditionally here (NPE)
		log.info("{}加工消息处理结束", dataSetDTO == null ? null : dataSetDTO.getDtId());
	}
}
