package com.gitee.lirenqing.flinksimpledatakafaksource;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

public class FlinkSimpleDataKafakSourceApplication {

    /**
     * Flink streaming job that consumes the Kafka topic {@code book} through a
     * hand-rolled {@link RichSourceFunction} wrapping a raw {@link KafkaConsumer},
     * forwards each record's value downstream, and prints it in a sink.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "localhost:9092");
        props.setProperty("group.id", "test_group");
        // Offsets are committed manually via commitSync() in the source loop,
        // so auto-commit must be disabled (the original enabled it, which
        // conflicts with manual commits and can commit offsets before the
        // records are actually processed).
        props.setProperty("enable.auto.commit", "false");
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        DataStreamSource<String> dataStreamSource = env.addSource(new RichSourceFunction<String>() {
            private KafkaConsumer<String, String> consumer;
            // volatile: cancel() is invoked from a different thread than run()
            private volatile boolean running = true;

            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                consumer = new KafkaConsumer<>(props);
                consumer.subscribe(Arrays.asList("book"));
            }

            @Override
            public void run(SourceContext<String> sourceContext) throws Exception {
                while (running) {
                    // Poll with a 1s timeout so the loop re-checks `running`
                    // regularly and shuts down promptly after cancel().
                    ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofSeconds(1));
                    if (!consumerRecords.isEmpty()) {
                        for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                            System.out.println("TopicName: " + consumerRecord.topic()
                                    + " Partition:" + consumerRecord.partition()
                                    + " Offset:" + consumerRecord.offset()
                                    + " Msg:" + consumerRecord.value());
                            sourceContext.collect(consumerRecord.value());
                        }
                        // Synchronous commit: only mark offsets consumed after
                        // the whole batch has been emitted downstream.
                        consumer.commitSync();
                    }
                }
            }

            @Override
            public void cancel() {
                running = false;
            }

            @Override
            public void close() throws Exception {
                // Release the consumer's network/heap resources; the original
                // never closed it, leaking the connection on shutdown.
                if (consumer != null) {
                    consumer.close();
                }
                super.close();
            }
        });

        dataStreamSource.addSink(new RichSinkFunction<String>() {
            @Override
            public void invoke(String value, Context context) throws Exception {
                // Log label fixed: was "kafka skin" (typo for "sink").
                System.out.println("kafka sink:" + value);
            }
        });

        env.execute("data source kafka");
    }

}
