package cn.itcast.flink.base;

import com.alibaba.fastjson.JSON;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaContextAware;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaSerializationSchemaWrapper;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkFixedPartitioner;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.util.Properties;

/**
 * Author itcast
 * Date 2021/7/27 17:12
 * Desc TODO
 */
/**
 * Demo: serialize {@link Student} POJOs to JSON strings and write them to the
 * Kafka topic {@code flink-kafka} with a Flink streaming job using
 * exactly-once (transactional) delivery semantics.
 */
public class KafkaProducerConnector {
    /** Kafka topic the job writes to. */
    private static final String TOPIC = "flink-kafka";

    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Source: a single in-memory Student element (demo data).
        DataStreamSource<Student> studentDS = env.fromElements(new Student(2, "jackma", 23));

        // 3. Transformation: the Kafka sink below uses a plain string schema,
        //    so convert each Student to its JSON representation first.
        SingleOutputStreamOperator<String> studentStrDataStream = studentDS.map(new MapFunction<Student, String>() {
            @Override
            public String map(Student value) throws Exception {
                // fastjson serializes the POJO via its Lombok-generated getters.
                return JSON.toJSONString(value);
            }
        });

        // 4. Sink: configure the Kafka producer.
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092,node2:9092,node3:9092");
        // EXACTLY_ONCE uses Kafka transactions. The producer-side transaction
        // timeout must not exceed the broker's transaction.max.timeout.ms
        // (broker default: 15 min), so 5 min is safe here.
        props.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, Integer.toString(5 * 60 * 1000));

        // Simpler alternative (AT_LEAST_ONCE, default partitioning):
        //   new FlinkKafkaProducer<>(TOPIC, new SimpleStringSchema(), props)
        // Here we wrap SimpleStringSchema explicitly so we can pin the
        // partitioner and request EXACTLY_ONCE semantics.
        FlinkKafkaProducer<String> producer = new FlinkKafkaProducer<String>(
                TOPIC,
                new KafkaSerializationSchemaWrapper<String>(
                        TOPIC,
                        new FlinkFixedPartitioner<String>(),   // each Flink subtask sticks to one Kafka partition
                        false,                                  // do not write the record timestamp to Kafka
                        new SimpleStringSchema()
                ),
                props,
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE
        );

        // Attach the Kafka sink to the JSON stream.
        studentStrDataStream.addSink(producer);

        // 5. Execute the job.
        env.execute("KafkaProducerConnector");
        // Verify (note: with EXACTLY_ONCE, add --isolation-level read_committed to see only committed records):
        //   /export/server/kafka/bin/kafka-console-consumer.sh --bootstrap-server node1:9092 --topic flink-kafka
    }

    /**
     * Simple POJO carried through the pipeline; Lombok generates the
     * getters/setters/constructors that fastjson relies on.
     */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class Student {
        private Integer id;     // student id
        private String name;    // student name
        private Integer age;    // student age in years
    }
}
