package com.example.kafka

import com.example.bean.BusInfo
import com.example.monitor.MysqlWriter
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * Structured Streaming job: consumes string records from a Kafka topic,
 * parses each one into a [[BusInfo]], and persists every row to MySQL
 * through the custom [[MysqlWriter]] ForeachWriter sink.
 *
 * @author lbq
 * @date 2022/3/13 16:33
 * @version 1.0
 */
object StructuredKafka {

  /**
   * Entry point. Blocks forever on `awaitTermination()`.
   *
   * Optional positional arguments (all default to the original hard-coded
   * values, so running with no args behaves exactly as before):
   *   args(0) — Kafka bootstrap servers (default "aliyun:9092")
   *   args(1) — topic to subscribe to   (default "spark_kafka")
   *   args(2) — checkpoint directory    (default "./chk/structured_kafka")
   */
  def main(args: Array[String]): Unit = {
    // Allow broker list / topic / checkpoint dir to be overridden from the
    // command line; `lift` yields None for missing positions.
    val bootstrapServers = args.lift(0).getOrElse("aliyun:9092")
    val topic            = args.lift(1).getOrElse("spark_kafka")
    val checkpointDir    = args.lift(2).getOrElse("./chk/structured_kafka")

    val conf = new SparkConf()
    // `getClass` already refers to this object's class; no need to name it explicitly.
    conf.setAppName(getClass.getName)
    conf.setMaster("local[*]")
    val spark = SparkSession.builder().config(conf).getOrCreate()
    spark.sparkContext.setLogLevel("warn")

    // Needed for the String and BusInfo encoders used by `.as[String]` / `.map`.
    import spark.implicits._

    // 1) Source: each Kafka row carries key/value/topic/partition/offset columns.
    val kafkaDf: DataFrame = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", bootstrapServers)
      .option("subscribe", topic)
      .load()

    // 2) Transform: Kafka delivers `value` as binary — cast it to a string,
    //    then parse each line into a BusInfo record.
    val kafkaValDf: DataFrame = kafkaDf.selectExpr("CAST(value AS STRING)")
    val kafkaDs: Dataset[String] = kafkaValDf.as[String]
    val busInfo: Dataset[BusInfo] = kafkaDs.map(BusInfo(_))

    // 3) Sink: write every record to MySQL. The checkpoint location (previously
    //    missing) lets the query persist its offsets/progress so a restarted job
    //    resumes where it left off instead of starting from scratch.
    busInfo.writeStream
      .foreach(new MysqlWriter)
      .outputMode("append")
      .option("checkpointLocation", checkpointDir)
      .start()
      .awaitTermination()
  }
}
