package com.gitee.lirenqing.flinksimpledatakafkasink;


import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.io.InputStream;
import java.util.Properties;

public class FlinkSimpleDataKafkaSinkApplication {

    /**
     * Entry point: builds and runs a Flink streaming job that reads strings from
     * {@link SimpleSource} (parallelism 1) and writes each record to the Kafka
     * topic {@code book} on a broker at {@code localhost:9092}.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<String> dataStreamSource =
                env.addSource(new SimpleSource()).setParallelism(1);

        dataStreamSource.addSink(new RichSinkFunction<String>() {

            // KafkaProducer is not Serializable; it must be created in open()
            // after the sink is deployed, so it is transient on purpose.
            private transient KafkaProducer<String, String> kafkaProducer;
            private final String topic = "book";

            /**
             * Creates the Kafka producer once per sink instance.
             * Note: "group.id" is a consumer-side property and is ignored by
             * producers, so it is intentionally not set here.
             */
            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                Properties properties = new Properties();
                properties.setProperty("bootstrap.servers", "localhost:9092");
                properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
                properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
                kafkaProducer = new KafkaProducer<>(properties);
            }

            /** Sends one record to Kafka and flushes immediately. */
            @Override
            public void invoke(String value, Context context) throws Exception {
                kafkaProducer.send(new ProducerRecord<>(topic, value));
                System.out.println("kafka send:" + value);
                // Per-record flush trades throughput for prompt delivery;
                // acceptable for this low-volume demo job.
                kafkaProducer.flush();
            }

            /** Flushes any buffered records and releases the producer. */
            @Override
            public void close() throws Exception {
                if (kafkaProducer != null) {
                    kafkaProducer.flush();
                    kafkaProducer.close();
                }
                super.close();
            }
        });

        env.execute("data kafka sink");
    }

}
