package org.yuanzheng.sink

import java.lang
import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaProducer, KafkaSerializationSchema}
import org.apache.kafka.clients.producer.ProducerRecord

/**
 * Flink streaming job: reads whitespace-separated words from a socket,
 * maintains a running count per word, and writes each (word, count) update
 * to the Kafka topic "yuan" with exactly-once semantics.
 *
 * @author yuanzheng
 * @date 2020/6/16-21:02
 */
object KafkaKeyValueSink {
  def main(args: Array[String]): Unit = {
    // 1. Environment setup (parallelism 1 keeps console/debug output ordered).
    val streamEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    streamEnv.setParallelism(1)
    import org.apache.flink.streaming.api.scala._

    // 2. Source: raw text lines from a socket.
    val stream: DataStream[String] = streamEnv.socketTextStream("192.168.1.9", 8888)

    // 3. Word count: split lines into words, pair each with 1, and keep a
    //    running sum per word. keyBy(_._1) replaces the deprecated
    //    index-based keyBy(0); sum(1) aggregates the count field.
    val result: DataStream[(String, Int)] = stream
      .flatMap(_.split(" "))
      .map((_, 1))
      .keyBy(_._1)
      .sum(1)

    /* Kafka sink configuration */
    // 4. Kafka connection properties.
    val props = new Properties()
    props.setProperty("bootstrap.servers", "192.168.1.8:9092")
    // EXACTLY_ONCE uses Kafka transactions. Flink's producer defaults the
    // transaction timeout to 1 hour, which exceeds the broker's default
    // transaction.max.timeout.ms (15 min) and makes the producer fail at
    // startup — set it explicitly to stay within the broker limit.
    props.setProperty("transaction.timeout.ms", "900000") // 15 minutes

    // 5. Kafka sink: record key = word bytes, record value = count bytes.
    //    NOTE(review): getBytes() uses the platform default charset — consider
    //    getBytes(StandardCharsets.UTF_8) if consumers expect a fixed encoding.
    val kafkaSink = new FlinkKafkaProducer[(String, Int)]("yuan", new KafkaSerializationSchema[(String, Int)] {
      // `timestamp` is the element's timestamp and may be null; unused here.
      override def serialize(t: (String, Int), timestamp: lang.Long): ProducerRecord[Array[Byte], Array[Byte]] = {
        new ProducerRecord("yuan", t._1.getBytes(), t._2.toString.getBytes())
      }
    }, props, FlinkKafkaProducer.Semantic.EXACTLY_ONCE)

    result.addSink(kafkaSink)
    // Explicit job name for easier identification in the Flink UI.
    streamEnv.execute("KafkaKeyValueSink")
  }
}
