package com.ywps.vaas.framework.util

import com.ywps.vaas.framework.conf.ConfigTools
import com.ywps.vaas.framework.constant.PropertiesConstant
import org.apache.spark.sql.DataFrame
import org.slf4j.LoggerFactory

import java.util.Properties


object KafkaUtil {
  val logger: org.slf4j.Logger = LoggerFactory.getLogger(this.getClass)
  val properties: Properties = ConfigTools.getPropertiesFactory()
  val servers: String = properties.getProperty(PropertiesConstant.KAFKA_BOOTSTRAP_SERVERS)

  /**
   * Writes a static (batch) DataFrame to Kafka.
   *
   * Each row is serialized to a single JSON string and produced as one Kafka
   * record's value on the given topic.
   *
   * @param dataFrame batch dataset to publish
   * @param topic     destination Kafka topic
   */
  def writeDfToKafka(dataFrame: DataFrame, topic: String): Unit = {
    dataFrame.toJSON.write
      .format(PropertiesConstant.KAFKA_SOURCE)
      .option(PropertiesConstant.KAFKA_BOOTSTRAP_SERVERS, servers)
      .option(PropertiesConstant.KAFKA_TOPIC, topic)
      .save()
  }

  /**
   * Starts a streaming write of a DataFrame to Kafka.
   *
   * Each row is serialized to a single JSON string and produced as one Kafka
   * record's value on the given topic. The running [[org.apache.spark.sql.streaming.StreamingQuery]]
   * is returned so callers can monitor progress, handle failures, or call
   * `awaitTermination()`; previously the handle was discarded, leaving stream
   * errors unobservable. Returning it is source-compatible: existing callers
   * that ignore the result still compile.
   *
   * NOTE(review): the Kafka sink requires a checkpoint location; pass one here
   * or ensure `spark.sql.streaming.checkpointLocation` is set globally.
   *
   * @param dataFrame          streaming dataset to publish
   * @param topic              destination Kafka topic
   * @param checkpointLocation optional checkpoint directory for exactly-once
   *                           progress tracking; `None` (the default) preserves
   *                           the original behavior of relying on the session-wide
   *                           setting
   * @return the started streaming query handle
   */
  def writeStreamDfToKafka(dataFrame: DataFrame,
                           topic: String,
                           checkpointLocation: Option[String] = None): org.apache.spark.sql.streaming.StreamingQuery = {
    val writer = dataFrame.toJSON.writeStream
      .format(PropertiesConstant.KAFKA_SOURCE)
      .option(PropertiesConstant.KAFKA_BOOTSTRAP_SERVERS, servers)
      .option(PropertiesConstant.KAFKA_TOPIC, topic)
    // Only set the checkpoint when explicitly provided, so existing callers'
    // behavior (global checkpoint config) is unchanged.
    checkpointLocation.foreach(dir => writer.option("checkpointLocation", dir))
    writer.start()
  }
}
