package com.bbx.flink.kafkaDemo;


import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

@Slf4j
@Slf4j
public class SinkToKafka {

    /** Kafka topic that every line of the input file is written to. */
    private static final String TOPIC = "bbx-flink-topic";

    /**
     * Reads a local text file line by line and sinks each line to the
     * {@value #TOPIC} Kafka topic with exactly-once semantics.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        // Sample file contents: one short numeric string per line, e.g.
        // 1111, 2222, 3333, ..., 1234, 2234, ..., 5678.
        String filePath = "C:\\Users\\kaifacs\\Desktop\\flinkToKafke.txt";

        // Obtain the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Source: read the file; parallelism 1 keeps the line order stable.
        DataStreamSource<String> source = env.readFile(new TextInputFormat(new Path(filePath)), filePath)
                .setParallelism(1);

        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "master11.bbx.com:9092");
        // EXACTLY_ONCE uses Kafka transactions. Flink's default transaction
        // timeout (1 hour) exceeds the broker's default
        // transaction.max.timeout.ms (15 minutes), which makes the producer
        // fail at startup. Set an explicit timeout within the broker maximum.
        properties.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, String.valueOf(15 * 60 * 1000));

        KafkaSerializationSchema<String> schema = new KafkaSerializationSchema<String>() {
            private static final long serialVersionUID = -8159262134473166099L;

            /** Wraps each input line as a value-only record for the topic. */
            @Override
            public ProducerRecord<byte[], byte[]> serialize(String s, @Nullable Long aLong) {
                return new ProducerRecord<>(TOPIC, s.getBytes(StandardCharsets.UTF_8));
            }
        };

        // Parameterized producer (no raw type) keeps the sink type-safe.
        FlinkKafkaProducer<String> flinkKafkaProducer = new FlinkKafkaProducer<>(TOPIC,
                schema, properties,
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE);

        source.addSink(flinkKafkaProducer);

        // Log BEFORE execute(): execute() blocks until the job terminates,
        // so a "start" message after it would only appear on shutdown.
        log.info("[{}]", "start to sink to kafka");
        env.execute("flink_kafka");
    }
}
