package com.ruoyi.project.flink.latest;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

import java.util.Properties;

@Component
public class FlinkApplication implements CommandLineRunner {

    @Override
    public void run(String... args) throws Exception {
        /*final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        Properties properties = new Properties();
        properties.put("bootstrap.servers","192.168.33.207:9092");
        properties.put("zookeeper.connect","192.168.33.207:2181");
        properties.put("group.id","test1");
        properties.put("key.deserializer","org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer","org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("auto.offset.reset","latest");
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<String>("test_1",new SimpleStringSchema(),properties);
        DataStream<String> dataStream = env.addSource(consumer);
        dataStream.print();
        dataStream.addSink(new MySink());
        env.execute("flink learning connector kafka");*/

        //  4、从Kafka中读取数据
//        StreamExecutionEnvironment env04 = StreamExecutionEnvironment.createLocalEnvironment();
//        env04.setParallelism(1);
////	Kafka配置信息
//        Properties properties = new Properties();
//        properties.setProperty("bootstrap.servers", "192.168.33.207:9092");
//        properties.setProperty("group.id", "consumer-group");
//        properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
//        properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
//        properties.setProperty("auto.offset.reset", "latest");
////	添加数据源(addSource)
//        DataStreamSource<String> stream04 = env04.addSource(new FlinkKafkaConsumer<String>(
//                "test_1",
//                new SimpleStringSchema(),
//                properties
//        ));
//        stream04.print("kafka");
//        env04.execute();
    }
}
