package com.ruoyi.kafka;


import com.alibaba.fastjson.JSON;
import com.ruoyi.common.json.JSONObject;
import com.ruoyi.websocket.WebSocketUsers;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

@Service
public class KafkaConsumers {

    /** Kafka topic this service subscribes to. */
    private static final String TOPIC = "SHEHUIZHILI-TRACK-PERSON";
    /** Directory holding the Kerberos client files krb5.conf and jaas.conf. */
    private static final String CONFIG_DIR = "D:\\RuoYi\\";
    /** Kafka broker list (host:port, comma-separated). */
    private static final String BOOTSTRAP_SERVERS = "11.100.5.102:32610,11.100.5.101:32283,11.100.5.99:31994";
    /** Kerberos service principal instance (cluster-specific SASL property). */
    private static final String TOS_PRINCIPAL = "tos_k2142rz";

    /** Poll-loop flag; flipped to false by close() to let the worker exit. */
    private volatile boolean isRunning = true;
    /** Background consumer thread, kept so close() can interrupt a pending sleep. */
    private volatile Thread workerThread;

    /**
     * Starts a background thread that consumes records from {@link #TOPIC}
     * and pushes each record value to all connected WebSocket users.
     * Invoked automatically after bean construction.
     */
    @PostConstruct
    public void printReceiveMsg() {
        workerThread = new Thread(() -> {
            try {
                // Give the rest of the application context time to come up
                // before the first poll (original behavior).
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                // Restore the interrupt flag and abort startup cleanly.
                Thread.currentThread().interrupt();
                return;
            }

            // Kerberos client configuration for the secured cluster.
            System.setProperty("java.security.krb5.conf", CONFIG_DIR + "krb5.conf");
            System.setProperty("java.security.auth.login.config", CONFIG_DIR + "jaas.conf");

            Properties props = buildConsumerProperties();
            System.out.println("读取配置信息成功！");

            // try-with-resources guarantees the consumer (and its broker
            // connections) are released however the loop exits.
            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Arrays.asList(TOPIC));
                JSONObject jsonObject = new JSONObject();

                while (isRunning) {
                    ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofSeconds(10));
                    if (!consumerRecords.isEmpty()) {
                        for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                            System.out.println("TopicName: " + consumerRecord.topic() + " Partition:" +
                                    consumerRecord.partition() + " Offset:" + consumerRecord.offset() + "" +
                                    " Msg:" + consumerRecord.value());
                            // Relay the raw record value to every connected client.
                            jsonObject.put("JXH", consumerRecord.value());
                            WebSocketUsers.sendMessageToUsersByText(JSON.toJSONString(jsonObject));
                        }
                        // enable.auto.commit=false, so offsets MUST be committed
                        // manually; otherwise every restart replays the whole
                        // topic from "earliest".
                        consumer.commitAsync();
                    }
                }
            } catch (Exception e) {
                // NOTE(review): consider SLF4J logging instead of stderr.
                e.printStackTrace();
            }
        }, "kafka-consumer-" + TOPIC);
        // Daemon thread so a hung poll cannot keep the JVM alive on shutdown.
        workerThread.setDaemon(true);
        workerThread.start();
    }

    /**
     * Stops the poll loop on bean destruction. The worker exits within one
     * poll timeout (10s) at the latest; an interrupt also breaks the initial
     * startup sleep.
     */
    @PreDestroy
    public void close() {
        isRunning = false;
        Thread t = workerThread;
        if (t != null) {
            t.interrupt();
        }
    }

    /**
     * Builds the consumer configuration for the SASL/GSSAPI-secured cluster.
     *
     * @return fully populated consumer {@link Properties}
     */
    private static Properties buildConsumerProperties() {
        Properties props = new Properties();
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        // Cluster-specific Kerberos principal instance.
        props.put("sasl.kerberos.service.principal.instance", TOS_PRINCIPAL);
        // Consumer group id (naming convention: account name).
        props.put("group.id", "default");
        // Manual offset commits — see commitAsync() in the poll loop.
        props.put("enable.auto.commit", "false");
        // When no committed offset exists (or it is invalid), start from the
        // earliest available record instead of the default "latest".
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Kerberos-authenticated plaintext transport.
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "GSSAPI");
        props.put("sasl.kerberos.service.name", "kafka");
        return props;
    }

    /**
     * Manual smoke-test entry point; starts the consumer thread directly.
     */
    public static void main(String[] args) throws Exception {
        KafkaConsumers kafkaConsumers = new KafkaConsumers();
        kafkaConsumers.printReceiveMsg();
    }
}
