package com.sugon.tokafka

import java.util.Properties

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.{DataFrame, SparkSession}


object KafkaMain {

  /**
    * Entry point: reads every row of a Hive table and publishes each row as a
    * JSON string to a Kafka topic.
    *
    * Expected CLI arguments, in order:
    *   - args(0): table name, qualified as "db.table"
    *   - args(1): Kafka topic name
    *   - args(2): Kafka bootstrap servers (host:port[,host:port...])
    *
    * @throws IllegalArgumentException if fewer than 3 arguments are supplied
    */
  def main(args: Array[String]): Unit = {

    if (args.length < 3) {
      throw new IllegalArgumentException("输入参数错误,参数依次为 '表名【库名.表名】topic kafka服务器地址'")
    }
    // Input arguments: table to read, topic to write, Kafka cluster address.
    val tableName = args(0)
    val topicName = args(1)
    val kafkaServer = args(2)

    val spark: SparkSession = SparkSession.builder()
      .appName(s"hiveToKafka_$tableName:$topicName")
      //      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    try {
      val resultDF: DataFrame = spark.table(tableName)

      // Broadcast a lazily-initialized producer wrapper so each executor JVM
      // creates a single KafkaProducer rather than one per record.
      // NOTE(review): assumes KafkaSink instantiates its producer lazily on
      // the executor side — confirm against its definition.
      val kafkaProducer: Broadcast[KafkaSink[String, String]] = {
        val kafkaProducerConfig: Properties = {
          val p = new Properties()
          p.setProperty("bootstrap.servers", kafkaServer)
          p.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
          p.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
          p
        }
        spark.sparkContext.broadcast(KafkaSink[String, String](kafkaProducerConfig))
      }

      // Serialize each row to JSON on the executors and send it to the topic.
      resultDF.toJSON.foreach { str =>
        kafkaProducer.value.send(topicName, str)
      }
    } finally {
      // Always release the SparkSession (and its cluster resources), even if
      // the table read or the Kafka publishing fails.
      spark.stop()
    }
  }
}
