package flink_p1

import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaProducer, KafkaSerializationSchema}
import org.apache.kafka.clients.producer.{ProducerConfig, ProducerRecord}

import java.lang
import java.util.Properties

/**
 * Demo job: reads lines from a local socket, computes a running per-word count,
 * and writes each updated count to a Kafka topic with exactly-once semantics.
 *
 * Run `nc -lk 8889` first, and have a Kafka broker on 127.0.0.1:9092.
 */
object FlinkTest_12_Sink_Kafka {

  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // EXACTLY_ONCE sink semantics are driven by Flink checkpoints: Kafka
    // transactions are only committed on checkpoint completion. Without
    // checkpointing enabled, a read_committed consumer would never see data.
    env.enableCheckpointing(5000L)

    val socketStream: DataStream[String] = env.socketTextStream("127.0.0.1", 8889)

    val prop = new Properties()
    prop.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092")
    // Flink's EXACTLY_ONCE producer defaults transaction.timeout.ms to 1 hour,
    // which exceeds the broker's default transaction.max.timeout.ms (15 min)
    // and makes the job fail at startup. Keep it within the broker limit.
    prop.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, "900000")
    // Key/value serializers are not needed here: the KafkaSerializationSchema
    // below already produces raw byte arrays.

    val topic = "kafka-sink-test"

    val flinkKafkaProducer: FlinkKafkaProducer[(String, Int)] = new FlinkKafkaProducer[(String, Int)](
      topic,
      new KafkaSerializationSchema[(String, Int)]() {
        // Serialize (word, count) as key = word bytes, value = count as decimal string bytes.
        override def serialize(element: (String, Int), timestamp: lang.Long): ProducerRecord[Array[Byte], Array[Byte]] = {
          new ProducerRecord[Array[Byte], Array[Byte]](topic, element._1.getBytes(), element._2.toString.getBytes())
        }
      },
      prop,
      FlinkKafkaProducer.Semantic.EXACTLY_ONCE
    )

    socketStream
      .map((_, 1))
      // keyBy(Int) on a tuple index is deprecated and untyped; use a key selector.
      .keyBy(_._1)
      .sum(1)
      .addSink(flinkKafkaProducer)

    // Lazily-built pipeline only runs once execute() is called.
    env.execute("kafka test")

  }

}
