package com.gitee.sink

import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializerFeature
import com.gitee.source.practice.CustomerSourceKafka
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011

import scala.beans.BeanProperty
/*
  TODO Kafka sink example: serialize a demo record to JSON and write it to a Kafka topic.
 */
object KafkaSink {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Build a single-element demo stream.
    import org.apache.flink.api.scala._
    val students: DataStream[Student] = env.fromElements(Student(8, "tony", 18))

    // Target Kafka topic.
    val topic = "mytopic"

    // Producer that writes each record to Kafka as a plain string.
    val producer =
      new FlinkKafkaProducer011[String](topic, new SimpleStringSchema(), CustomerSourceKafka.kafkaConf)

    // Serialize every Student to its JSON string form before sinking.
    val jsonStream: DataStream[String] = students.map(toJson _)

    jsonStream.print()
    jsonStream.addSink(producer)
    env.execute()
  }

  /**
   * Converts a Student to a JSON string via fastjson.
   * Circular-reference detection is disabled so the plain field values are emitted.
   */
  private def toJson(st: Student): String =
    JSON.toJSONString(st, SerializerFeature.DisableCircularReferenceDetect)

  /**
   * Demo record. @BeanProperty generates Java-style get/set methods,
   * which fastjson requires for bean serialization.
   */
  case class Student(
      @BeanProperty id: Int,
      @BeanProperty name: String,
      @BeanProperty age: Int)

}
