package com.bw.util;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;

import java.util.Properties;

/**
 * Utility for creating Flink Kafka sources, sinks, and SQL DDL snippets,
 * all pointing at the cluster returned by {@link ConfigUtil#getKafkaServer()}.
 *
 * <p>Each factory method builds its own {@link Properties} instance, so
 * per-client settings (consumer {@code group.id}, producer transaction
 * timeout) never leak between the clients this class creates.
 */
public class MyKafkaUtil {

    // Default topic for DWD fact data, used when a serialization schema
    // routes records itself (see getKafkaSInkBySchema).
    private static final String DWD_DEFAULT_TOPIC = "dwd_default_topic";

    // Kafka bootstrap servers, read once from external configuration.
    private static final String KAFKA_SERVER = ConfigUtil.getKafkaServer();

    /** Utility class; not instantiable. */
    private MyKafkaUtil() {
    }

    /**
     * Returns a fresh Properties object pre-populated with the bootstrap
     * servers. A new instance per call avoids the shared-mutable-state bug
     * where one client's settings (e.g. a consumer's group.id) leaked into
     * every subsequently created producer or consumer.
     */
    private static Properties baseProperties() {
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_SERVER);
        return props;
    }

    /**
     * Creates a simple string-serializing Kafka producer for the given topic.
     *
     * @param topic target Kafka topic
     * @return a producer writing plain strings to {@code topic}
     */
    public static FlinkKafkaProducer<String> getKafkaSink(String topic) {
        return new FlinkKafkaProducer<>(topic, new SimpleStringSchema(), baseProperties());
    }

    /**
     * Creates an exactly-once Kafka producer driven by a custom
     * {@link KafkaSerializationSchema}, which may route each record to its
     * own topic; {@code DWD_DEFAULT_TOPIC} is only the fallback.
     *
     * <p>(Note: the {@code SInk} spelling is kept for caller compatibility.)
     *
     * @param <T>         element type handled by the schema
     * @param kafkaSchema serializer that also decides the target topic
     * @return an EXACTLY_ONCE transactional producer
     */
    public static <T> FlinkKafkaProducer<T> getKafkaSInkBySchema(KafkaSerializationSchema<T> kafkaSchema) {
        Properties props = baseProperties();
        // Kafka brokers cap transactions at transaction.max.timeout.ms
        // (15 min by default), while Flink's producer default (1 h) exceeds
        // it — EXACTLY_ONCE initialization would fail without lowering this.
        props.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, String.valueOf(15 * 60 * 1000));
        return new FlinkKafkaProducer<>(DWD_DEFAULT_TOPIC,
                kafkaSchema,
                props,
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE);
    }

    /**
     * Creates a string-deserializing Kafka consumer.
     *
     * @param groupId consumer group id for offset tracking
     * @param topic   topic to subscribe to
     * @return a consumer reading plain strings from {@code topic}
     */
    public static FlinkKafkaConsumer<String> getKafkaSource(String groupId, String topic) {
        Properties props = baseProperties();
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        return new FlinkKafkaConsumer<>(topic,
                new SimpleStringSchema(),
                props);
    }

    /**
     * Builds the WITH-clause body for a Flink SQL Kafka table backed by the
     * given topic and consumer group (JSON format, latest-offset startup).
     *
     * @param topic   Kafka topic the table reads from
     * @param groupId consumer group id
     * @return comma-separated connector options for a CREATE TABLE statement
     */
    public static String getKafkaDDL(String topic, String groupId) {
        return "'connector' = 'kafka'," +
                "  'topic' = '" + topic + "'," +
                "  'properties.bootstrap.servers' = '" + KAFKA_SERVER + "'," +
                "  'properties.group.id' = '" + groupId + "'," +
                "  'scan.startup.mode' = 'latest-offset'," +
                "  'format' = 'json'";
    }

}

















