package com.sugon.ww

import java.util

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.{DataFrame, SparkSession}


object kafkaProducer {

  // Kafka producer configuration (these are broker/producer properties, NOT
  // Zookeeper settings as the original comment claimed). Kept at object level
  // so each executor JVM initialises its own copy; actual producer instances
  // are created per partition inside foreachPartition below.
  private val props = new util.HashMap[String, Object]()
  props.put("bootstrap.servers", "slave03:6667")
  props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
  props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

  /**
   * Entry point: reads a Hive table and publishes every row, serialised as a
   * JSON string, to a Kafka topic.
   *
   * @param args args(0) = fully-qualified table name ("db.table"),
   *             args(1) = target Kafka topic name
   * @throws IllegalArgumentException when fewer than two arguments are given
   */
  def main(args: Array[String]): Unit = {

    if (args.length < 2) {
      // Message corrected: only two arguments are actually read (table name and
      // topic); the Kafka broker address is hard-coded in `props` above, so the
      // original message advertising a third "kafka服务器地址" argument was wrong.
      throw new IllegalArgumentException("输入参数错误,参数依次为 '表名【库名.表名】 topic'")
    }
    // Input arguments
    val tableName = args(0)
    val topicName = args(1)

    val spark: SparkSession = SparkSession.builder()
      .appName("hiveToKafka_" + tableName + ":" + topicName)
      // .master("local[*]")  // enable for local debugging only
      .enableHiveSupport()
      .getOrCreate()

    try {
      val resultDF: DataFrame = spark.table(tableName)

      // Removed the original `resultDF.withColumn("abc", lit("202009111"))`:
      // DataFrames are immutable and its result was discarded, so it was a no-op.

      // NOTE(review): the hard-coded ORC dump to "/sss" looks like debug
      // leftover, but it is preserved here to keep existing behaviour.
      // TODO: confirm whether this output path is still needed.
      resultDF.write.format("orc").mode("overwrite").save("/sss")

      // Create one producer per partition on the executors and flush/close it
      // when the partition is done. The original shared a single object-level
      // producer that was never flushed or closed, so records still sitting in
      // the producer's buffer could be silently lost when the executor exited.
      resultDF.toJSON.foreachPartition { (rows: Iterator[String]) =>
        val producer = new KafkaProducer[String, String](props)
        try {
          rows.foreach { json =>
            if (json != null) {
              producer.send(new ProducerRecord[String, String](topicName, null, json))
            }
          }
          // Block until all buffered records are actually delivered.
          producer.flush()
        } finally {
          producer.close()
        }
      }
    } finally {
      // Release the Spark application's resources even if the job fails.
      spark.stop()
    }
  }

}
