package com.chb.flink.sink

import java.lang
import java.util.Properties

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaProducer, KafkaSerializationSchema}
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.StringSerializer

/**
 * Flink job: reads comma-separated words from a socket, computes a running
 * word count, and writes (word, count) pairs to a Kafka topic with
 * exactly-once delivery semantics.
 */
object KafkaSinkByKeyValue {
    def main(args: Array[String]): Unit = {
        val streamEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
        streamEnv.setParallelism(1) // default parallelism for every operator is 1

        // EXACTLY_ONCE delivery relies on Flink checkpoints: the Kafka transaction
        // is committed only when a checkpoint completes. Without checkpointing
        // enabled, records are never committed and stay invisible to
        // read_committed consumers. Checkpoint every 5 seconds.
        streamEnv.enableCheckpointing(5000)

        import org.apache.flink.streaming.api.scala._

        // Read lines from a netcat socket (real-time stream).
        val stream1: DataStream[String] = streamEnv.socketTextStream("10.0.0.201", 8888)

        // Word count: split each line on commas, pair every word with 1,
        // key by the word and keep a running sum per key.
        val result = stream1.flatMap(_.split(","))
            .map((_, 1))
            .keyBy(0)
            .sum(1)


        // Kafka producer configuration.
        val props = new Properties()
        props.setProperty("bootstrap.servers", "10.0.0.201:9092")
        // FlinkKafkaProducer's default transaction timeout is 1 hour, but Kafka
        // brokers reject anything above transaction.max.timeout.ms (15 minutes
        // by default), which makes the producer fail at startup. Stay within
        // the broker's cap: 15 minutes.
        props.setProperty("transaction.timeout.ms", "900000")

        // Serialize each (word, count) pair as key/value byte arrays into topic
        // "test". The timestamp argument is unused.
        // NOTE(review): getBytes() uses the platform default charset — consider
        // pinning UTF-8 if consumers run on differently configured JVMs.
        val kafkaSink = new FlinkKafkaProducer[(String, Int)]("test", new KafkaSerializationSchema[(String, Int)] {
            override def serialize(t: (String, Int), aLong: lang.Long): ProducerRecord[Array[Byte], Array[Byte]] = {
                new ProducerRecord("test", t._1.getBytes(), (t._2 + "").getBytes())
            }
        }, props, FlinkKafkaProducer.Semantic.EXACTLY_ONCE)

        result.addSink(kafkaSink)

        // Blocks and runs the streaming job until cancelled.
        streamEnv.execute()
    }
}