package javaVersion.learn.streamProcess;

import org.apache.flink.api.common.serialization.SimpleStringSchema;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

import java.util.Properties;

/**
 * @program: myFlink
 * @description: 从Kafka获取数据
 * @author: WincoMa
 * @create: 2020-07-17 17:56
 **/
/**
 * Demo job: consume string records from a Kafka 0.11 topic and print them
 * to stdout via a Flink streaming pipeline.
 */
public class Pro4_StreamFromKafka {
    public static void main(String[] args) throws Exception {
        // Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 5000 ms with exactly-once recovery semantics.
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);

        // Kafka consumer configuration: broker address and consumer group.
        Properties kafkaProps = new Properties();
        kafkaProps.setProperty("bootstrap.servers", "hadoop1:9092");
        kafkaProps.setProperty("group.id", "flink-group");
        String topic = "test";

        // Build the Flink Kafka source (0.11 connector), deserializing
        // each record as a plain UTF-8 string.
        FlinkKafkaConsumer011<String> consumer =
                new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(), kafkaProps);
        // Watermarks could be attached here via assignTimestampsAndWatermarks
        // if event-time processing were needed.

        DataStreamSource<String> stream = env.addSource(consumer);
        stream.print();
        env.execute("StreamKafkaDemo");
    }
}
