package com.it.sink;

import com.it.pojo.Event;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;

import java.util.Properties;

/**
 * Reads click records from Kafka, transforms them, and writes the results back to Kafka.
 * Pipeline: topic "flink-clicks" --> parse CSV line into an Event string --> topic "flink-events".
 *
 * @author code1997
 */
public class KafkaSink {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        executionEnvironment.setParallelism(1);

        //Consumer config: Flink's Kafka consumer requires both bootstrap.servers and group.id.
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop02:9092");
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "kafka-sink-demo");
        DataStreamSource<String> kafkaSource = executionEnvironment.addSource(
                new FlinkKafkaConsumer<String>("flink-clicks", new SimpleStringSchema(), properties));

        //Transform: each record is expected to be "user,url,timestamp"; re-serialize via Event.toString().
        SingleOutputStreamOperator<String> events = kafkaSource.map(new MapFunction<String, String>() {

            @Override
            public String map(String value) throws Exception {
                String[] fields = value.split(",");
                //Fail fast with a descriptive message instead of an opaque ArrayIndexOutOfBoundsException.
                if (fields.length < 3) {
                    throw new IllegalArgumentException("Malformed record, expected 'user,url,timestamp' but got: " + value);
                }
                //Long.parseLong avoids the needless Long boxing of Long.valueOf.
                return new Event(fields[0].trim(), fields[1].trim(), Long.parseLong(fields[2].trim())).toString();
            }
        });

        //Producer config and sink: write the transformed strings back to Kafka.
        Properties produceConfig = new Properties();
        produceConfig.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop02:9092");
        events.addSink(new FlinkKafkaProducer<>("flink-events", new SimpleStringSchema(), produceConfig));

        //Explicit job name so the job is identifiable in the Flink web UI.
        executionEnvironment.execute("KafkaSink");
    }

}
