package com.showdor.springboot.jingqing;

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.security.scram.ScramLoginModule;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;

/**
 * Flink streaming job that consumes string records from a SASL/SCRAM-secured
 * Kafka topic via a resilient (auto-reconnecting) consumer, runs each record
 * through {@link AlertingMapper}, and prints the result.
 *
 * <p>Resilience comes from three layers: the consumer's reconnect backoff,
 * Flink checkpointing (5s), and a fixed-delay restart strategy (3 attempts,
 * 10s apart).
 */
public class FlinkKafkaJob {
    private static final Logger LOG = LoggerFactory.getLogger(FlinkKafkaJob.class);

    /** Kafka broker address (SASL_PLAINTEXT listener). */
    private static final String BOOTSTRAP_SERVERS = "115.238.84.147:9093";

    /** Topic consumed by this job. */
    private static final String TOPIC = "external.wenZhou.eventEvent";

    /** Environment variable that, when set, overrides the built-in JAAS credentials. */
    private static final String JAAS_ENV_VAR = "KAFKA_SASL_JAAS_CONFIG";

    // SECURITY(review): this fallback embeds live SCRAM credentials in source control.
    // It is kept only for backward compatibility — rotate the password and supply the
    // JAAS string via the KAFKA_SASL_JAAS_CONFIG environment variable instead.
    private static final String DEFAULT_JAAS_CONFIG =
            "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"guangxin.quzhou\" password=\"78K%!4g8LV\";";

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Resilient consumer: value deserialization is handled by SimpleStringSchema,
        // so no key/value (de)serializer entries belong in the Properties.
        ResilientKafkaConsumer<String> consumer = new ResilientKafkaConsumer<>(
                TOPIC,
                new SimpleStringSchema(),
                buildConsumerProperties()
        );
        consumer.setStartFromEarliest();

        // Checkpointing + fixed-delay restart so transient broker outages
        // restart the job instead of failing it permanently.
        env.enableCheckpointing(5000);
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(
                3,     // max restart attempts
                10000  // delay between attempts (ms)
        ));

        // Pipeline: source -> map -> stdout.
        DataStream<String> stream = env.addSource(consumer);
        stream.map(new AlertingMapper()).print();

        env.execute("Resilient Kafka Consumer Job");
    }

    /**
     * Builds the Kafka <em>consumer</em> configuration.
     *
     * <p>Note: the original code set {@code ProducerConfig} serializer entries here;
     * those are producer-only options that a consumer ignores (with warnings), so
     * they have been removed. Deserialization is performed by Flink's
     * {@code SimpleStringSchema}, not by the Kafka client.
     *
     * @return properties for the SASL/SCRAM-authenticated consumer
     */
    private static Properties buildConsumerProperties() {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");

        // SASL/SCRAM authentication; JAAS string is overridable via environment.
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "SCRAM-SHA-256");
        String jaasConfig = System.getenv(JAAS_ENV_VAR);
        props.put("sasl.jaas.config", jaasConfig != null ? jaasConfig : DEFAULT_JAAS_CONFIG);

        // Reconnect backoff: start retrying after 1s, back off up to 10s.
        props.put(ConsumerConfig.RECONNECT_BACKOFF_MS_CONFIG, "1000");
        props.put(ConsumerConfig.RECONNECT_BACKOFF_MAX_MS_CONFIG, "10000");
        return props;
    }

    /**
     * Prefixes each incoming record with a marker string.
     * Placeholder for real alerting/enrichment logic.
     */
    public static class AlertingMapper extends RichMapFunction<String, String> {
        @Override
        public String map(String value) throws Exception {
            return "==============Processed: " + value;
        }

        @Override
        public void close() throws Exception {
            // Log release of per-mapper resources (message kept verbatim for log parsers).
            LOG.info("Mapper资源释放");
        }
    }
}