package cn.itcast.flink.source;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Author itcast
 * Date 2021/9/20 9:11
 * Writes vehicle JSON data to a Kafka cluster with Flink.
 * Steps:
 * 1. Create the stream execution environment
 * 2. Configure parallelism (checkpointing / restart strategy would go here too)
 * 3. Create the FlinkKafkaProducer
 * 3.1. Configure producer properties
 * 4. Attach the Kafka sink to the source stream
 * 5. Execute the streaming job
 */
public class FlinkKafkaWriter {

    /** Target Kafka topic — single constant so the sink and the record topic cannot drift apart. */
    private static final String TOPIC = "vehicledata";

    public static void main(String[] args) throws Exception {
        //1. Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //2. Set parallelism; checkpointing and restart strategy could be configured here as well
        env.setParallelism(1);
        //2.1. Read the vehicle JSON source data (local teaching dataset)
        DataStreamSource<String> source = env
                .readTextFile("F:\\1.授课视频\\4-车联网项目\\05_深圳24期\\全部讲义\\2-星途车联网系统第二章-原始终端数据实时ETL\\原始数据\\sourcedata.txt");

        //3. Create the FlinkKafkaProducer
        //3.1. Configure producer properties
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node01:9092,node02:9092,node03:9092");
        props.setProperty(ProducerConfig.BATCH_SIZE_CONFIG, "5");
        // acks=0: fire-and-forget, no broker acknowledgement — consistent with Semantic.NONE below
        props.setProperty(ProducerConfig.ACKS_CONFIG, "0");

        //3.2. Instantiate the FlinkKafkaProducer
        FlinkKafkaProducer<String> producer = new FlinkKafkaProducer<>(
                TOPIC,
                new KafkaSerializationSchema<String>() {
                    @Override
                    public ProducerRecord<byte[], byte[]> serialize(String element, @Nullable Long timestamp) {
                        // Explicit charset: the no-arg getBytes() uses the platform default
                        // and can corrupt non-ASCII JSON payloads depending on the JVM locale.
                        return new ProducerRecord<>(
                                TOPIC,
                                element.getBytes(StandardCharsets.UTF_8)
                        );
                    }
                },
                props,
                FlinkKafkaProducer.Semantic.NONE
        );

        //4. Attach the Kafka sink to the source stream
        source.addSink(producer);
        //5. Execute the streaming job; let failures propagate instead of swallowing them
        env.execute();
    }
}
