package com.hjs.homework

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * 使用结构化流读取kafka中的数据
 */
/**
 * Entry point for the real-time pipeline: consumes bus-info records from Kafka,
 * parses them into `BusInfo` rows, and fans the stream out to two sinks
 * (a MySQL writer and the console). Blocks until any running query terminates.
 */
object RealTimeProcess {
  def main(args: Array[String]): Unit = {
    // Step 1: build a local SparkSession for this streaming job.
    val spark: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName(RealTimeProcess.getClass.getName)
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")
    import spark.implicits._

    // Step 2: declare the Kafka source (topic "hjs_bus_info").
    val rawStream: DataFrame = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "centos7kafkanode1:9092") // broker address
      .option("subscribe", "hjs_bus_info")                          // subscribed topic
      .load()

    // Step 3: keep only the record value, decoded from bytes to String.
    val valueStream: Dataset[String] = rawStream
      .selectExpr("CAST(value AS STRING)")
      .as[String]

    // Parse each line into a BusInfo row; BusInfo.apply returns null for
    // unparseable input, so drop those before writing downstream.
    val busEvents: Dataset[BusInfo] = valueStream.map(BusInfo(_)).filter(_ != null)

    // Sink 1: persist parsed events to MySQL via a custom ForeachWriter.
    // NOTE(review): JDBC credentials are hard-coded here — consider moving
    // them to configuration / args before deploying.
    busEvents.writeStream
      .foreach(new MySQLWriter("jdbc:mysql://bogon:3306/lagou_homework?useSSL=false", "root", "han103849"))
      .outputMode("append")
      .start()

    // Sink 2: mirror the same events to the console for debugging.
    busEvents.writeStream
      .format("console")
      .outputMode("append")
      .start()

    // Block the driver until either query stops (or fails).
    spark.streams.awaitAnyTermination()
  }
}
