package com.sugon.staticcs

import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.spark.sql.{DataFrame, SparkSession}


object kafkaProducer {

  /**
   * Reads a Hive table and publishes every row as a JSON message to a Kafka topic.
   *
   * Expected CLI arguments (positional):
   *   args(0) - fully-qualified table name ("db.table")
   *   args(1) - target Kafka topic
   *   args(2) - Kafka bootstrap server address
   *
   * @throws IllegalArgumentException when fewer than 3 arguments are supplied
   */
  def main(args: Array[String]): Unit = {

    if (args.length < 3) {
      throw new IllegalArgumentException("输入参数错误,参数依次为 '表名【库名.表名】topic kafka服务器地址'")
    }
    // Positional input arguments.
    val tableName = args(0)
    val topicName = args(1)
    val kafkaServer = args(2)

    val spark: SparkSession = SparkSession.builder()
      .appName("hiveToKafka_" + tableName + ":" + topicName)
      //      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // NOTE(review): init runs on the driver only; the send closure executes on
      // executors, so KafkaUtil.getProducer must lazily (re)initialize per JVM —
      // confirm against KafkaUtil's implementation.
      KafkaUtil.init(kafkaServer)

      val resultDF: DataFrame = spark.table(tableName)

      // foreachPartition instead of per-row foreach: look up the producer once per
      // partition, and flush() before the task ends so asynchronously buffered
      // sends are not lost when the executor JVM exits.
      resultDF.toJSON.repartition(3).foreachPartition { (rows: Iterator[String]) =>
        val producer = KafkaUtil.getProducer
        rows.foreach { json =>
          // No key (null) — Kafka will round-robin/sticky-partition the records.
          producer.send(new ProducerRecord[String, String](topicName, null, json))
        }
        producer.flush()
      }
    } finally {
      // Always release the Spark session, even when the job fails.
      spark.stop()
    }

  }


}
