package sink;

import com.alibaba.fastjson.JSON;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaSerializationSchemaWrapper;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkFixedPartitioner;
import org.apache.kafka.clients.producer.ProducerConfig;

import java.util.Properties;

/**
 * Author itcast
 * Date 2021/6/17 16:46
 * Goal: serialize each stream element to a JSON string and produce it to Kafka.
 * Steps: env -> source (single Student) -> map to JSON string -> FlinkKafkaProducer sink -> execute
 */
public class KafkaProducerDemo {
    public static void main(String[] args) throws Exception {
        // 1. Execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // EXACTLY_ONCE delivery relies on Flink checkpointing: the Kafka transaction is
        // committed only when a checkpoint completes. Without this call, the transactional
        // producer never commits and no record becomes visible to read-committed consumers.
        env.enableCheckpointing(5000);

        // 2. Source: a single Student element.
        DataStreamSource<Student> studentDS = env.fromElements(new Student(104, "chaoxian", 25));

        // 3. Transformation.
        // The sink uses a plain string schema, so convert each Student to a JSON string first.
        SingleOutputStreamOperator<String> mapDS = studentDS.map(new MapFunction<Student, String>() {
            @Override
            public String map(Student value) throws Exception {
                return JSON.toJSONString(value);
            }
        });

        // 4. Sink.
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.88.161:9092");
        // Flink's default producer transaction timeout (1 hour) exceeds the Kafka broker
        // default transaction.max.timeout.ms (15 minutes); a transactional (EXACTLY_ONCE)
        // producer fails at startup unless this is lowered below the broker limit.
        props.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, String.valueOf(5 * 60 * 1000));

        // The simplest overload only needs (brokers, topic, schema); this overload is used
        // because EXACTLY_ONCE semantics require passing an explicit Semantic value.
        FlinkKafkaProducer<String> producer = new FlinkKafkaProducer<>(
                "flink_kafka",
                new KafkaSerializationSchemaWrapper<>(
                        "flink_kafka",
                        new FlinkFixedPartitioner<>(),
                        false,               // writeTimestamp: do not attach Flink timestamps
                        new SimpleStringSchema()
                ),
                props,
                // Transactional, exactly-once delivery (committed on checkpoint).
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE
        );

        mapDS.addSink(producer);

        // 5. Execute the job.
        env.execute("KafkaProducerDemo");
        // Verify with:
        // /export/server/kafka/bin/kafka-console-consumer.sh --bootstrap-server node1:9092 --topic flink_kafka
    }

    /** POJO that is serialized to JSON before being written to Kafka. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class Student {
        private Integer id;
        private String name;
        private Integer age;
    }
}
