package com.demo.kafka.flink.example;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;  
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;  
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;  
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;  
  
import java.util.Properties;  
  
/**
 * Minimal Flink streaming job that reads strings from a source Kafka topic,
 * prints each record, and mirrors the stream to a sink Kafka topic.
 *
 * <p>NOTE(review): {@code FlinkKafkaConsumer}/{@code FlinkKafkaProducer} are
 * deprecated in recent Flink releases in favor of {@code KafkaSource}/{@code KafkaSink};
 * migrating would require new connector imports, so it is flagged here only.
 */
public class KafkaFlinkExample {

    /**
     * Builds and executes the Kafka-to-Kafka pipeline.
     *
     * <p>All connection settings are overridable from the command line; omitted
     * arguments fall back to the original hard-coded defaults, so invoking with
     * no arguments behaves exactly as before.
     *
     * @param args optional positional overrides:
     *             [0] bootstrap servers (default {@code localhost:19092}),
     *             [1] source topic      (default {@code first}),
     *             [2] sink topic        (default {@code target-topic}),
     *             [3] consumer group id (default {@code testGroup})
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        final String bootstrapServers = args.length > 0 ? args[0] : "localhost:19092";
        final String sourceTopic = args.length > 1 ? args[1] : "first";
        final String sinkTopic = args.length > 2 ? args[2] : "target-topic";
        final String groupId = args.length > 3 ? args[3] : "testGroup";

        // Set up the streaming execution environment.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka consumer configuration.
        Properties consumerProps = new Properties();
        consumerProps.setProperty("bootstrap.servers", bootstrapServers);
        consumerProps.setProperty("group.id", groupId);

        // Create the Kafka consumer; SimpleStringSchema deserializes each
        // record value as a UTF-8 string.
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>(
            sourceTopic,
            new SimpleStringSchema(),
            consumerProps);

        // Attach the consumer as the job's source.
        DataStream<String> stream = env.addSource(consumer);

        // Print each record (placeholder for real processing).
        stream.print();

        // Kafka producer configuration.
        Properties producerProps = new Properties();
        producerProps.setProperty("bootstrap.servers", bootstrapServers);

        // Create the Kafka producer; SimpleStringSchema serializes each
        // record back to UTF-8 bytes.
        FlinkKafkaProducer<String> producer = new FlinkKafkaProducer<>(
            sinkTopic,
            new SimpleStringSchema(),
            producerProps);

        // Write the stream back out to Kafka.
        stream.addSink(producer);

        // Submit and run the job.
        env.execute("Flink Kafka Example");
    }
}