package com.event.streaming.common.straming
import com.event.streaming.config.setttings.StreamingConsumerSettings
import org.apache.spark.sql.streaming.Trigger.ProcessingTime
import org.apache.spark.sql.{ForeachWriter, Row, SparkSession}

@SerialVersionUID(202007171627L)
class StructureStreamingConsumer extends Serializable { self: StreamingConsumerSettings =>

  /** The supported Structured Streaming output modes. */
  // NOTE(review): scala.Enumeration is discouraged in favor of a sealed ADT
  // (or Scala 3 enum), but it is kept here unchanged because OutputMode.Value
  // is part of the public `start` signature and callers depend on it.
  object OutputMode extends Enumeration {
    type OutputMode = Value
    val append, update, complete = Value
  }

  /**
   * Entry point: subscribe to the given Kafka topic as a streaming DataFrame,
   * run `process` over the key/value columns (cast to strings), and deliver
   * each result row to the supplied sink. Blocks until the query terminates.
   *
   * @param spark the active SparkSession
   * @param topic the Kafka topic to subscribe to
   * @param sink  the per-row writer that receives the processed output
   * @param mode  the streaming output mode (append / update / complete)
   */
  def start(spark: SparkSession, topic: String, sink: ForeachWriter[Row], mode: OutputMode.Value): Unit = {
    // Load the raw Kafka stream.
    // Fixed: the option key was misspelled "kafka.boostrap.servers"; the Kafka
    // source requires "kafka.bootstrap.servers", so the original failed at runtime.
    val df = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", kafkaBrokerUrl)
      .option("subscribe", topic)
      .load()

    // Map the enum to the mode string expected by DataStreamWriter.outputMode.
    val output = mode match {
      case OutputMode.append => "append"
      case OutputMode.update => "update"
      case _                 => "complete"
    }

    // Fixed: ".writeStrem" -> ".writeStream" and ".putputMode" -> ".outputMode"
    // (the original did not compile).
    val query = process(df.selectExpr("CAST(key AS String)", "CAST(value AS STRING)"))
      .writeStream
      .foreach(sink)
      .outputMode(output)
      // NOTE(review): the interval is multiplied by 2000 ms, i.e. treated as a
      // count of 2-second units — TODO confirm streamingInterval's unit; if it
      // is seconds, this should likely be * 1000L.
      .trigger(ProcessingTime(this.streamingInterval * 2000L))
      .start()

    query.awaitTermination()
  }

}
