package demo;


import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaDeserializationSchemaWrapper;

/**
 * Minimal Flink streaming job that consumes string records from a Kafka topic and
 * prints them to stdout.
 *
 * <p>Uses a local execution environment with the web UI enabled, so the job can be
 * launched straight from the IDE (the Flink runtime dependencies must be on the
 * classpath).
 *
 * <p>Optional program arguments, in order: bootstrap servers, topic, consumer group id.
 * When omitted, the original hard-coded demo values are used, so existing invocations
 * keep working unchanged.
 */
public class Flink10_KAFKA {
    // Defaults preserve the values that were previously hard-coded in main().
    private static final String DEFAULT_BOOTSTRAP_SERVERS = "114.251.235.19:9092";
    private static final String DEFAULT_TOPIC = "topic_test";
    private static final String DEFAULT_GROUP_ID = "my-consumer-group";

    /**
     * Entry point: builds the Kafka source, wires it into the stream, and runs the job.
     *
     * @param args optional overrides: [0] bootstrap servers, [1] topic, [2] group id
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Allow overriding connection settings from the command line; fall back to
        // the demo defaults so the job still runs with no arguments.
        String bootstrapServers = args.length > 0 ? args[0] : DEFAULT_BOOTSTRAP_SERVERS;
        String topic = args.length > 1 ? args[1] : DEFAULT_TOPIC;
        String groupId = args.length > 2 ? args[2] : DEFAULT_GROUP_ID;

        // Local environment with the web UI so the running job can be inspected
        // (by default at http://localhost:8081) when launched from the IDE.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        // Single parallel task keeps the printed output ordered for this demo.
        env.setParallelism(1);

        // Value-only string deserialization; start from the earliest available offset
        // so previously produced records are replayed on each run.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers(bootstrapServers)
                .setGroupId(groupId)
                .setTopics(topic)
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .setStartingOffsets(OffsetsInitializer.earliest())
                .build();

        // No event-time logic in this job, so watermarks are disabled.
        DataStreamSource<String> stream =
                env.fromSource(source, WatermarkStrategy.noWatermarks(), "Flink10_KAFKA");

        stream.print();

        env.execute();
    }
}
