package cn.bigdata.structstreaming

import cn.bigdata.structstreaming.job.MysqlWriter
import org.apache.spark.sql.{Column, DataFrame, SparkSession}


object RealTimeProcess {

  /**
   * Streaming entry point: reads bus telemetry from the `lagou_bus_info`
   * Kafka topic, publishes low-fuel warnings to `lg_bus_warn_info`, and
   * persists every parsed record to MySQL via a foreach writer.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local")
      .appName(this.getClass.getName.init) // .init drops the trailing '$' of the object's class name
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")
    import spark.implicits._

    // Source stream: raw Kafka records.
    val kafkaDF: DataFrame = spark.readStream.format("kafka")
      .option("kafka.bootstrap.servers", "centos3:9092")
      .option("subscribe", "lagou_bus_info")
      .load()

    // Kafka's value column is binary; cast it to string before parsing.
    val kafkaValDF = kafkaDF.selectExpr("CAST(value as STRING)")
    val kafkaDS = kafkaValDF.as[String]

    // Parse each raw message into a BusInfo record.
    val busInfoDS = kafkaDS.map(msg => BusInfo(msg))

    // Alert on abnormal vehicles: remaining oil below 30.
    // BUG FIX: the original discarded this filter's result, so the warn topic
    // received the UNFILTERED stream; keep the filtered dataset and write it.
    // NOTE(review): assumes oilRemain is always a parseable integer string —
    // confirm against BusInfo; a malformed record would fail the query here.
    val warnDS = busInfoDS.filter(_.oilRemain.toInt < 30)

    // Write warnings back to Kafka. The Kafka sink requires a string/binary
    // column named "value"; expose deployNum under that name.
    warnDS.withColumn("value", $"deployNum").writeStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "centos3:9092")
      .option("topic", "lg_bus_warn_info")
      .option("checkpointLocation", "./ck")
      .outputMode("append")
      .start()

    // Persist the full stream to MySQL through the custom foreach writer.
    val mysqlWriter = new MysqlWriter
    busInfoDS
      .writeStream.foreach(mysqlWriter)
      .outputMode("append")
      .start()

    // Block until ANY active query terminates; awaiting only one query (as the
    // original did) would silently mask a failure in the Kafka warn query.
    spark.streams.awaitAnyTermination()

//    val writer = new RedisWriter

    // Write the stream to Redis (currently disabled).
//    busInfoDS
//      .writeStream.foreach(writer)
//      .outputMode("append")
//      .start()
//      .awaitTermination()
  }

}
