import org.apache.spark.sql.{Dataset, SparkSession}

object RealTimeProcess {

  /**
   * Entry point: a Structured Streaming job that consumes bus messages
   * from Kafka, parses each record into a [[BusInfo]] (per the original
   * comments, this extracts latitude/longitude), and writes every record
   * to MySQL through [[MySqlForeachWriter]]. Blocks until the query ends.
   */
  def main(args: Array[String]): Unit = {
    // Local Spark session for this job; keep the log noise down.
    val spark = SparkSession
      .builder()
      .appName(RealTimeProcess.getClass.getCanonicalName)
      .master("local[*]")
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")

    // Streaming source: subscribe to the bus-info topic on Kafka.
    val rawStream = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "linux121:9092")
      .option("subscribe", "lg_bus_info")
      .load()

    import spark.implicits._

    // Kafka delivers the payload as bytes; cast to String before parsing.
    val messages: Dataset[String] =
      rawStream.selectExpr("CAST(value AS STRING)").as[String]

    // Parse each raw message into a BusInfo record.
    val busRecords: Dataset[BusInfo] = messages.map(line => BusInfo(line))

    // Sink: push each parsed record to MySQL via the foreach writer,
    // then wait here until the streaming query terminates.
    val query = busRecords
      .writeStream
      .foreach(new MySqlForeachWriter())
      .outputMode("append")
      .start()

    query.awaitTermination()
  }
}
