package org.huel.backend.websocket;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.huel.backend.config.WebSocketServer;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.time.Duration;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Consumes per-course count messages from the Kafka topic {@code CourseCount1},
 * accumulates running totals per course, and pushes both the running totals and
 * the counts of the latest poll batch to WebSocket clients on the
 * {@code CourseCount} channel.
 *
 * @author Liweijian
 * @since 2024/11/22
 */

@Component
public class CourseCountConsumer {

    private static final Logger LOG = Logger.getLogger(CourseCountConsumer.class.getName());

    // TODO(config): externalize the broker address instead of hard-coding it.
    private static final String BOOTSTRAP_SERVERS = "43.143.125.94:9092";
    private static final String TOPIC = "CourseCount1";
    private static final String WEBSOCKET_CHANNEL = "CourseCount";

    /**
     * Known course names mapped to the fragment used in the output JSON keys
     * ("total" + fragment + "Count" / "current" + fragment + "Count").
     * Insertion order fixes the key order of the emitted JSON.
     */
    private static final Map<String, String> COURSE_KEY_NAMES = new LinkedHashMap<>();
    static {
        COURSE_KEY_NAMES.put("Hadoop", "Hadoop");
        COURSE_KEY_NAMES.put("Spark", "Spark");
        COURSE_KEY_NAMES.put("Flink", "Flink");
        COURSE_KEY_NAMES.put("Hive", "Hive");
        // Historical key is "totalHbaseCount"/"currentHbaseCount" (lowercase 'b'),
        // while the incoming course name is "HBase" — keep both spellings intact.
        COURSE_KEY_NAMES.put("HBase", "Hbase");
        COURSE_KEY_NAMES.put("Kafka", "Kafka");
    }

    /**
     * Running totals per course, accumulated over the whole lifetime of the
     * consumer. Instance fields (this is a Spring singleton), touched only by
     * the single consumer thread — no synchronization needed.
     */
    private final Map<String, Integer> totalCounts = new LinkedHashMap<>();

    private KafkaConsumer<String, String> kfkConsumer;

    /** Loop guard; flipped to false by {@link #shutdown()}. */
    private volatile boolean running = true;

    /**
     * Builds the Kafka consumer, subscribes to the topic, and starts the
     * single background consumer thread. A JVM shutdown hook is registered so
     * the consumer is closed cleanly on exit.
     */
    @PostConstruct
    public void init() {
        // Consumer configuration.
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "CourseCountConsumer");
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        kfkConsumer = new KafkaConsumer<>(properties);
        kfkConsumer.subscribe(Collections.singletonList(TOPIC));

        // Single named consumer thread; KafkaConsumer is not thread-safe and
        // is only ever used from this thread after the hand-off here.
        new Thread(this::consumeMessages, "course-count-consumer").start();

        // Ensure a clean close even if nobody calls shutdown() explicitly.
        Runtime.getRuntime().addShutdownHook(new Thread(this::shutdown, "course-count-consumer-shutdown"));
    }

    /**
     * Requests the consumer loop to stop. {@code wakeup()} interrupts a
     * blocking {@code poll()}; the consumer thread then exits and closes the
     * KafkaConsumer. Safe to call more than once.
     */
    public void shutdown() {
        running = false;
        if (kfkConsumer != null) {
            kfkConsumer.wakeup();
        }
    }

    /**
     * Poll loop: every ~2s pull a batch, aggregate per-course counts, and push
     * a JSON payload with the running totals and the current batch's counts.
     * An empty batch still sends a payload (current counts all zero), matching
     * the original heartbeat-like behavior.
     */
    private void consumeMessages() {
        try {
            while (running) {
                try {
                    ConsumerRecords<String, String> records = kfkConsumer.poll(Duration.ofMillis(2000));

                    // Counts for this batch only; accumulate (+=) so several
                    // records for the same course within one batch are all kept.
                    Map<String, Integer> currentCounts = new LinkedHashMap<>();

                    for (ConsumerRecord<String, String> record : records) {
                        try {
                            JSONObject jsonObject = JSONObject.parseObject(record.value());
                            String course = jsonObject.getString("course");
                            int count = jsonObject.getIntValue("count");

                            // Unknown course names are ignored (same as the old default branch).
                            if (COURSE_KEY_NAMES.containsKey(course)) {
                                totalCounts.merge(course, count, Integer::sum);
                                currentCounts.merge(course, count, Integer::sum);
                            }
                        } catch (RuntimeException badRecord) {
                            // One malformed message must not kill the consumer.
                            LOG.log(Level.WARNING, "Skipping malformed message: " + record.value(), badRecord);
                        }
                    }

                    WebSocketServer.sendMessage(WEBSOCKET_CHANNEL, buildPayload(currentCounts).toJSONString());
                } catch (RuntimeException e) {
                    if (!running) {
                        break; // wakeup() during shutdown — normal exit.
                    }
                    // Transient poll/send failure: log and keep consuming.
                    LOG.log(Level.WARNING, "Consumer iteration failed; retrying", e);
                }
            }
        } finally {
            // Always release the consumer's sockets and group membership.
            kfkConsumer.close();
        }
    }

    /**
     * Assembles the outgoing JSON: {"totalCounts": {...}, "currentCounts": {...}}.
     *
     * @param currentCounts per-course counts of the latest batch (missing = 0)
     * @return the payload to send over the WebSocket
     */
    private JSONObject buildPayload(Map<String, Integer> currentCounts) {
        JSONObject totalCountJson = new JSONObject();
        JSONObject currentCountJson = new JSONObject();
        for (Map.Entry<String, String> entry : COURSE_KEY_NAMES.entrySet()) {
            String keyName = entry.getValue();
            totalCountJson.put("total" + keyName + "Count", totalCounts.getOrDefault(entry.getKey(), 0));
            currentCountJson.put("current" + keyName + "Count", currentCounts.getOrDefault(entry.getKey(), 0));
        }

        JSONObject outputJson = new JSONObject();
        outputJson.put("totalCounts", totalCountJson);
        outputJson.put("currentCounts", currentCountJson);
        return outputJson;
    }
}