package cn.smileyan.demos;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.java.utils.MultipleParameterTool;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.producer.ProducerConfig;

import java.util.Properties;

/**
 * A minimal example of using Kafka as a Flink <em>sink</em>: a small bounded
 * in-memory stream of {@link Student} records is serialized and written to a
 * Kafka topic.
 *
 * @author Smileyan
 */
@Slf4j
public class FlinkKafkaSinkExample {
    /**
     * Command-line parameters (all optional):
     * <ul>
     *   <li>{@code -bs} Kafka bootstrap servers (default: {@code localhost:9092})</li>
     *   <li>{@code -ot} output topic the records are written to (default: {@code quickstart-results})</li>
     *   <li>{@code -tt} producer transaction timeout in milliseconds (default: {@code 300000})</li>
     * </ul>
     *
     * @param args arguments parsed with Flink's {@link MultipleParameterTool}
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        final MultipleParameterTool cmd = MultipleParameterTool.fromArgs(args);
        final String bootstrapServer = cmd.get("bs", "localhost:9092");
        final String outputTopic = cmd.get("ot", "quickstart-results");
        // getLong returns a primitive long; no need for the boxed Long wrapper.
        final long transactionTimeout = cmd.getLong("tt", 300000L);

        // A tiny bounded source so the example is fully self-contained.
        DataStream<Student> dataStream = env.fromElements(
                new Student(1, "张三", 18),
                new Student(2, "李四", 20),
                new Student(3, "王五", 22)
        );

        Properties properties = new Properties();
        // The transaction timeout is only consulted for EXACTLY_ONCE delivery,
        // but configuring it is harmless under AT_LEAST_ONCE.
        properties.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, String.valueOf(transactionTimeout));
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);

        final KafkaSink<Student> kafkaSink = KafkaSink.<Student>builder()
                .setKafkaProducerConfig(properties)
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        .setTopic(outputTopic)
                        .setValueSerializationSchema(new CommonEntitySchema<>(Student.class))
                        .build())
                // NOTE(review): Flink 1.19+ deprecates setDeliverGuarantee in
                // favor of setDeliveryGuarantee — switch when upgrading.
                .setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
                .build();

        // sinkTo only wires the sink into the job graph; nothing runs until
        // env.execute() below, so logging "Starting/Finished sink job" around
        // this call would be misleading.
        dataStream.sinkTo(kafkaSink);
        log.info("Submitting Flink job writing to topic '{}' via {}", outputTopic, bootstrapServer);

        env.execute("Flink Kafka Example");
    }

    /**
     * Simple POJO payload written to Kafka. Lombok generates the accessors and
     * the no-arg/all-args constructors that Flink POJO serialization relies on.
     */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    static class Student {
        private Integer id;
        private String name;
        private Integer age;
    }
}
