package wanda.modes

import org.apache.spark.sql.streaming.Trigger
import org.apache.spark.sql.{SparkSession}
import wanda.cnstants.Constants
import wanda.kafka.KafkaHelper
import wanda.topologies.SparkTopoContext
import wanda.topologies.bases.{SparkTopology}
import wanda.util.Checker

object GenericMethod {

  // Logger is public so callers/topologies can reuse it; annotate the type explicitly.
  lazy val log: org.apache.log4j.Logger =
    org.apache.log4j.LogManager.getLogger("GenericMethod")

  /**
   * Boots a Kafka -> Delta structured-streaming pipeline from a properties file.
   *
   * Reads topology configuration via [[SparkTopoContext]], builds a Kafka source
   * stream, and writes each micro-batch through `KafkaHelper.upsertToDelta`.
   * Blocks forever on `awaitTermination()`.
   *
   * @param args exactly one element: the path to the topology properties file
   * @tparam T   topology type bound; currently unused in the body but kept so
   *             existing explicit call sites (`initialize[SomeTopo](...)`) compile
   */
  def initialize[T <: SparkTopology](args: Array[String]): Unit = {

    // NOTE(review): any arity other than 1 throws a bare MatchError here;
    // consider a require() with a usage message for a friendlier failure.
    val Array(proPath) = args

    val stc = new SparkTopoContext(proPath)

    // Read but never used below — presumably stc.get validates the key's
    // presence; confirm before removing.
    val topoName = stc.get(Constants.CLASS_NAME)

    val topics = stc.get(Constants.TOPICS)

    val brokerList = stc.get(Constants.BROKERS)

    val maxOffsetsPerTrigger = stc.get(Constants.OFFSETSPERTRIGGER)

    val basePath = stc.get(Constants.LOCATIONPATH)

    // Trigger interval string, e.g. "10 seconds" — passed to Trigger.ProcessingTime.
    val interval = stc.get(Constants.INTERVAL)

    val checkpointLocation = stc.get(Constants.CHECKPOINTPATH)

    // val primaryKey = stc.get(Constants.PRIMARYKEY)

    val partition = stc.get(Constants.PARTITION)

    val deletePartitionMode = stc.get(Constants.DELETEMODE)

    val rangs = stc.get(Constants.RANGE)

    // NOTE(review): hard-coded local master prevents cluster deployment —
    // normally the master is supplied by spark-submit; kept here to preserve
    // current standalone behavior, but should be made configurable.
    val spark: SparkSession = SparkSession.builder.appName("Hoodie Spark Streaming APP")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .master("local[3]")
      .getOrCreate


    // Kafka source: raw key/value stream, throttled by maxOffsetsPerTrigger.
    val df = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", brokerList)
      .option("subscribe", topics)
      .option("maxOffsetsPerTrigger", maxOffsetsPerTrigger)
      .load()

    // Dump the resolved configuration for operator visibility.
    Checker.showMap(stc.getMap)

    // Prime the helper with table location/partitioning before streaming starts.
    KafkaHelper.initialize(basePath, partition, deletePartitionMode, rangs)

    val messagesDStream = KafkaHelper.StructuredFromKafka(spark, df)

    // `.format("delta")` removed: foreachBatch installs its own sink, so a
    // format setting before it was silently ignored (dead configuration).
    messagesDStream.writeStream
      .foreachBatch(KafkaHelper.upsertToDelta _)
      .trigger(Trigger.ProcessingTime(interval))
      .outputMode("update")
      .option("checkpointLocation", checkpointLocation)
      .start()
      .awaitTermination()

  }

}
