package ex.datastream.connectors;

import ex.datastream.functions.richFunction.StatefulKeyedProcessFunc;
import ex.datastream.sources.KafkaDataSource;
import ex.datastream.functions.function.FlatMapFuncBySplitter02;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Reads records from Kafka via the Flink Kafka connector and runs a simple
 * keyed pipeline: Kafka source -> flatMap splitter -> stateful keyed process.
 *
 * <p>Runs until cancelled; {@code main} submits the streaming job and blocks.
 */
public class KafkaTest {
    public static void main(String[] args) throws Exception {
        // Create the streaming execution environment.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Build the Kafka source. Parameterized as KafkaSource<String> (was a raw
        // type) so it matches the DataStreamSource<String> below without an
        // unchecked conversion. No watermarks: downstream uses no event time.
        KafkaDataSource kafkaDataSource = new KafkaDataSource();
        KafkaSource<String> source = kafkaDataSource.getKafkaSource();
        DataStreamSource<String> dataStreamSource =
                env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");

        // Split each incoming record into (token, count) pairs.
        SingleOutputStreamOperator<Tuple2<String, Integer>> items =
                dataStreamSource.flatMap(new FlatMapFuncBySplitter02());

        // Key by the token (f0) and apply the stateful process function.
        // The explicit uid keeps the operator's state addressable across
        // savepoint/restore even if the job graph changes.
        items.keyBy(value -> value.f0)
                .process(new StatefulKeyedProcessFunc())
                .uid("my-uid");

        // Submit the job with an explicit name (easier to find in the Flink UI).
        env.execute("KafkaTest");
    }
}
