package com.ikas.ai.server.kafka;

// NOTE(review): everything below is the commented-out dead code of
// MysqlKafkaWorkFlowConsumerService (a Kafka consumer for the work-flow topics
// TOPIC_U01..U04). Either restore it or delete this file — version control
// already preserves the history, and keeping ~100 lines of commented-out code
// only confuses maintainers and IDE search.
//
//import cn.hutool.core.collection.CollectionUtil;
//import cn.hutool.json.JSONUtil;
//import com.alibaba.fastjson.JSONArray;
//import com.alibaba.fastjson.JSONObject;
//import com.alibaba.fastjson.JSONValidator;
//import com.ikas.ai.model.KafkaMeteData;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.kafka.clients.consumer.ConsumerRecord;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.kafka.annotation.KafkaListener;
//import org.springframework.kafka.support.Acknowledgment;
//import org.springframework.stereotype.Component;
//
//import java.util.ArrayList;
//import java.util.Collections;
//import java.util.List;
//
///**
// * 工况流程topic消费
// */
//@Slf4j
//@Component
//public class MysqlKafkaWorkFlowConsumerService {
//
//    @Autowired
//    private WorkInstanceService workInstanceService;
//    @Autowired
//    private RedisDataOperation initDataRedisListener;
//
//    // 消费监听
//    @KafkaListener(topics = {KafkaInitialConfiguration.TOPIC_U01}, topicPattern = "0", groupId = KafkaInitialConfiguration.WORK_FLOW_TOPIC_GROUP)
//    public void onMessageU01(List<ConsumerRecord<String, String>> list, Acknowledgment ack) {
//        processKafkaMessage(list, ack);
//    }
//
//    // 消费监听
//    @KafkaListener(topics = {KafkaInitialConfiguration.TOPIC_U02}, topicPattern = "0", groupId = KafkaInitialConfiguration.WORK_FLOW_TOPIC_GROUP)
//    public void onMessageU02(List<ConsumerRecord<String, String>> list, Acknowledgment ack) {
//        processKafkaMessage(list, ack);
//    }
//
//    //     消费监听
//    @KafkaListener(topics = {KafkaInitialConfiguration.TOPIC_U03}, topicPattern = "0", groupId = KafkaInitialConfiguration.WORK_FLOW_TOPIC_GROUP)
//    public void onMessageU03(List<ConsumerRecord<String, String>> list, Acknowledgment ack) {
//        processKafkaMessage(list, ack);
//    }
//
//    // 消费监听
//    @KafkaListener(topics = {KafkaInitialConfiguration.TOPIC_U04}, topicPattern = "0", groupId = KafkaInitialConfiguration.WORK_FLOW_TOPIC_GROUP)
//    public void onMessageU04(List<ConsumerRecord<String, String>> list, Acknowledgment ack) {
//        processKafkaMessage(list, ack);
//    }
//
//    public void processKafkaMessage(List<ConsumerRecord<String, String>> list, Acknowledgment ack) {
//        list.forEach(record -> {
//            try {
//                List<KafkaMeteData> meteDataList = new ArrayList<>();
//                final JSONValidator.Type type = JSONValidator.from(record.value()).getType();
//                if (type == JSONValidator.Type.Array) {
//                    meteDataList = JSONArray.parseArray(record.value(), KafkaMeteData.class);
//                } else if (type == JSONValidator.Type.Object) {
//                    meteDataList = Collections.singletonList(JSONObject.parseObject(record.value(), KafkaMeteData.class));
//                }
//                if (CollectionUtil.isEmpty(meteDataList)) {
//                    return;
//                }
//                log.info("====WORKFLOW-KAFKA-====>offset>[{}]--timestamp>[{}]------>meteDataList:{}", record.offset()
//                        , record.timestamp(), JSONUtil.toJsonStr(meteDataList));
//                //设置类型排序
//                meteDataList.stream().forEach(kafkaMeteData -> {
//                    log.info("kafkaMeteData:{}", JSONUtil.toJsonStr(kafkaMeteData));
//                    workInstanceService.processKafkaMeteData(kafkaMeteData);
//                });
//
////                meteDataList.stream().map(kafkaMeteData -> {
////                    List<DataMete> cacheDataMeteList = initDataRedisListener.getCacheList(kafkaMeteData.getMachineNo(), kafkaMeteData.getMeteCode());
////                    if (CollectionUtil.isNotEmpty(cacheDataMeteList)) {
////                        kafkaMeteData.setType(cacheDataMeteList.get(0).getType());
////                    } else {
////                        log.info("缓存中未获取到测点值___{}",JSONUtil.toJsonStr(kafkaMeteData));
////                        kafkaMeteData.setType(DataMeteTypeEnum.FLOW.getCode());
////                    }
////                    return kafkaMeteData;
////                }).sorted(Comparator.comparing(KafkaMeteData::getType).reversed()).forEach(kafkaMeteData -> {
////                    log.info("kafkaMeteData:{}", JSONUtil.toJsonStr(kafkaMeteData));
////                    workInstanceService.processKafkaMeteData(kafkaMeteData);
////                });
//            } catch (Exception e) {
//                log.error("=====测点数据处理错误======", e);
//            }
//            //手动提交
//            ack.acknowledge();
//        });
//    }
//
//}
