package com.demo.kafka;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

/**
 * Flink streaming job: consumes string messages from Kafka topic {@code test1},
 * rewraps each message, and writes the result to Kafka topic {@code test2}.
 *
 * <p>Messages are expected to contain the marker {@code "测试"} followed by a
 * counter; messages without the marker are forwarded unchanged.
 *
 * @author zengqingdong
 * @since 2020/6/15
 */
public class KafkaSourceDemo {

    /** Comma-separated Kafka broker addresses shared by the source and the sink. */
    private static final String SERVERS =
            "8.134.126.106:9092,8.134.126.106:9093,8.134.126.106:9094";

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties properties = new Properties();
        properties.put("bootstrap.servers", SERVERS);
        // A consumer group id is mandatory for FlinkKafkaConsumer; without it the
        // job fails at startup. (The serializer.* settings that used to be here
        // were producer-only options, ignored by the consumer — deserialization
        // is handled by the SimpleStringSchema passed to the consumer below.)
        properties.put("group.id", "flink-kafka-source-demo");

        DataStream<String> dataStream = env.addSource(
                new FlinkKafkaConsumer<String>("test1", new SimpleStringSchema(), properties));

        // Extract the counter that follows the "测试" marker and rewrap it.
        DataStream<String> dataStream1 = dataStream.map(new MapFunction<String, String>() {
            @Override
            public String map(String s) throws Exception {
                // Guard against messages lacking the marker: the previous
                // unchecked split("测试")[1] threw ArrayIndexOutOfBoundsException
                // on such input and killed the whole streaming job.
                String[] parts = s.split("测试");
                if (parts.length < 2) {
                    return s; // pass unrecognized messages through unchanged
                }
                String count = parts[1];
                return "已经收到第" + count + "的信息";
            }
        });

        FlinkKafkaProducer<String> myproducer =
                new FlinkKafkaProducer<String>(SERVERS, "test2", new SimpleStringSchema());
        dataStream1.addSink(myproducer);

        env.execute(KafkaSourceDemo.class.getName());
    }
}
