package com.lifq.flink.streaming

import com.lifq.flink.source.{DataGenerator, MockDataGenerator}
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.connector.base.DeliveryGuarantee
import org.apache.flink.connector.kafka.sink.{KafkaRecordSerializationSchema, KafkaSink, KafkaSinkBuilder}
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, createTypeInformation}

/**
 * Flink job that generates mock data (via [[MockDataGenerator]]) and writes
 * each record as a plain String to a Kafka topic.
 *
 * Required program arguments:
 *   --brokers  Kafka bootstrap servers, e.g. "host1:9092,host2:9092"
 *   --topic    destination Kafka topic name
 *
 * @author LiFuqiang
 * @since 2023/4/10
 */
object ProduceDataToKafka {

  def main(args: Array[String]): Unit = {
    // Parse command-line arguments.
    val params = ParameterTool.fromArgs(args)

    // Fail fast with a clear error when a required argument is missing,
    // instead of passing null into the Kafka client and failing later.
    val brokers = params.getRequired("brokers")
    val topic   = params.getRequired("topic")

    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Unbounded stream of mock String records.
    val source = env.addSource(new MockDataGenerator())

    // AT_LEAST_ONCE: records may be duplicated on failure/restart, but are
    // never lost. Values are serialized as UTF-8 strings.
    val kafkaSink = KafkaSink.builder[String]()
      .setBootstrapServers(brokers)
      .setRecordSerializer(
        KafkaRecordSerializationSchema.builder()
          .setTopic(topic)
          .setValueSerializationSchema(new SimpleStringSchema())
          .build()
      )
      .setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
      .build()

    source.sinkTo(kafkaSink)

    // Submit the job; blocks until the job terminates (or runs forever for
    // an unbounded source).
    env.execute("kafka-data-produce-test")
  }

}
