package org.example.flink.operation;

import org.example.flink.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demo Flink job: reads strings from the Kafka topic "input-topic",
 * upper-cases each record, and prints it to stdout.
 *
 * <p>A background thread first seeds the topic with 100 test messages
 * via {@code KafkaUtil.sendMsg} (one every 200 ms) so the job has data
 * to consume.
 *
 * <p>NOTE(review): class name has a typo ("Kafaka" → "Kafka"); left
 * unchanged because renaming a public class breaks callers and the
 * .java filename.
 */
public class Hello03SourceFromKafaka {
    public static void main(String[] args) throws Exception {
        // Background producer: push 100 test messages into Kafka.
        Thread producer = new Thread(() -> {
            for (int i = 0; i < 100; i++) {
                KafkaUtil.sendMsg("input-topic", "Hello Flink:" + i + " time: " + System.currentTimeMillis());
                try {
                    Thread.sleep(200);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop producing instead of
                    // swallowing the interruption (the original dropped it).
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        });
        // Daemon so this helper thread never keeps the JVM alive on its own.
        producer.setDaemon(true);
        producer.start();

        // Execution environment
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

        // Source: consume "input-topic" from the earliest offset, value-only
        // string deserialization.
        KafkaSource<String> sourceSetting = KafkaSource.<String>builder()
                .setBootstrapServers("192.168.101.100:9092,192.168.101.101:9092,192.168.101.102:9092")
                .setTopics("input-topic")
                .setGroupId("my-group")
                .setStartingOffsets(OffsetsInitializer.earliest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();
        DataStreamSource<String> source = environment
                .fromSource(sourceSetting, WatermarkStrategy.noWatermarks(), "kafka source");

        // Transform + sink: upper-case each record and print it.
        source.map(String::toUpperCase).print();

        environment.execute();
    }
}
