package com.tech

import com.tech.common.KafkaUtils
import com.tech.config.ApplicationConfig
import com.tech.customer.{LoadResourceManager, MyForeachWriter, MyStreamingQueryListener}
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Company: Tianzheng Juhe (天正聚合)
 * File: Streaming
 * Author: zhoumindong
 * Date: 2021/7/6
 */
object Streaming {
  /**
   * Entry point for the Kafka -> Spark Structured Streaming job.
   *
   * Subscribes to the given Kafka topic(s), casts each record to
   * (value, topic, partition, offset) and hands every row to
   * [[MyForeachWriter]] in Append mode, blocking until the query terminates.
   *
   * @param args args(0) is the topic list to subscribe to;
   *             separate multiple topics with commas.
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear usage message instead of an
    // ArrayIndexOutOfBoundsException when no topic argument is given.
    require(args.nonEmpty, "Usage: Streaming <topics> (comma-separated Kafka topics)")
    val topics = args(0)

    val spark = SparkSession.builder()
      .appName(topics)
      .config("spark.dynamicAllocation.enabled", value = false)
      //      .master("local[*]")
      .getOrCreate()

    // Load resources once at startup, then start the recurring load;
    // the listener keeps the manager refreshed across query progress events.
    val manager = new LoadResourceManager()
    manager.loadOnce(spark)
    manager.load(spark)
    spark.streams.addListener(new MyStreamingQueryListener(spark, manager))

    val df: DataFrame = spark.readStream.format("kafka")
      .option("kafka.bootstrap.servers", ApplicationConfig.KAFKA_BROKER_LIST)
      .option("subscribe", topics)
      // Starting offsets come either from Kudu (when configured as "kudu")
      // or verbatim from config, e.g.
      // {"retailevent_uat":{"0":1390,"1":1390,"2":1390,"3":1390}}
      .option("startingOffsets",
        if (ApplicationConfig.KAFKA_STARTING_OFFSETS.equalsIgnoreCase("kudu"))
          KafkaUtils.getOffsetFromKudu(topics)
        else
          ApplicationConfig.KAFKA_STARTING_OFFSETS)
      .option("maxOffsetsPerTrigger", ApplicationConfig.KAFKA_MAX_OFFSETS_PER_TRIGGER)
      .load()

    import spark.implicits._
    // NOTE: the redundant .format("foreach") was removed — .foreach(writer)
    // itself defines the sink and overrides any previously set format.
    val query = df
      .selectExpr(
        "CAST(value AS STRING)",
        "CAST(topic AS STRING)",
        "CAST(partition AS int)",
        "CAST(offset AS long)")
      .as[(String, String, Int, Long)]
      .writeStream
      .outputMode(OutputMode.Append)
      .foreach(new MyForeachWriter(manager))
      .start()

    try {
      // Blocks until the query terminates; rethrows the streaming error if
      // the query failed.
      query.awaitTermination()
    } finally {
      // Previously stop() sat after awaitTermination(), making it unreachable
      // on failure and a no-op on success; a finally guarantees cleanup.
      query.stop()
    }
  }
}
