package cn.yanceysong.kafka;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Objects;
import java.util.Properties;

public class KafkaSourceDemo {
    private static final Logger log = LoggerFactory.getLogger(KafkaSourceDemo.class);

    /** Fallback Redis port, used when no {@code redis.port} property is supplied. */
    private static final String DEFAULT_REDIS_PORT = "6379";

    /**
     * Entry point: builds a Kafka source that tails {@code kafka.topic} starting
     * from the latest offsets and forwards every record value both to stdout
     * (for debugging) and to a Redis sink.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to build, submit, or execute
     */
    public static void main(String[] args) throws Exception {
        // Configuration is held inline for this demo; it could instead be loaded
        // from a properties file, e.g.:
        //   ParameterTool.fromPropertiesFile("flink-kafka-demo/src/main/resources/application.properties")
        Properties paramsMap = new Properties();
        paramsMap.setProperty("kafka.bootstrapServers", "alikafka-serverless-cn-omn3tua7701-1000-vpc.alikafka.aliyuncs.com:9092,alikafka-serverless-cn-omn3tua7701-2000-vpc.alikafka.aliyuncs.com:9092,alikafka-serverless-cn-omn3tua7701-3000-vpc.alikafka.aliyuncs.com:9092");
        paramsMap.setProperty("kafka.topic", "gms-msm-return-dev");
        paramsMap.setProperty("kafka.group", "gms_dev_group_xyh");
        paramsMap.setProperty("redis.host", "172.25.32.1");

        //================ 2. Read the Kafka / Redis parameters =====================
        // getProperty (rather than get + String cast) is the type-safe accessor for
        // Properties and also consults the defaults chain. requireNonNull fails fast
        // with a clear message instead of a bare NPE inside the Kafka client when a
        // key is missing.
        String bootstrapServers = Objects.requireNonNull(
                paramsMap.getProperty("kafka.bootstrapServers"), "kafka.bootstrapServers");
        String group = Objects.requireNonNull(paramsMap.getProperty("kafka.group"), "kafka.group");
        String topic = Objects.requireNonNull(paramsMap.getProperty("kafka.topic"), "kafka.topic");
        String redisHost = Objects.requireNonNull(paramsMap.getProperty("redis.host"), "redis.host");
        // redis.port is optional; defaults to the standard Redis port (6379).
        int redisPort = Integer.parseInt(paramsMap.getProperty("redis.port", DEFAULT_REDIS_PORT));

        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers(bootstrapServers)
                .setTopics(topic)
                .setGroupId(group)
                // Start from the latest offsets: only records produced after the job
                // starts are consumed (no replay of historical data).
                .setStartingOffsets(OffsetsInitializer.latest())
                // Deserialize only the record value (the key is ignored) as a String.
                .setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
                .build();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // No watermarks: this pipeline performs no event-time processing.
        DataStreamSource<String> kafkaSource = env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");

        // Echo each record for debugging, then forward it to Redis.
        kafkaSource.print();
        kafkaSource.addSink(new RedisSink(redisHost, redisPort));

        env.execute("kafka listener job");
    }
}
