import org.apache.spark.SparkContext
import org.apache.spark.sql.streaming.Trigger
import org.apache.spark.sql.{Dataset, SparkSession}

object StructuredStreamBusinfo {

  /** Entry point: streams bus records from Kafka into MySQL.
    *
    * Pipeline: Kafka topic -> raw string value -> `BusInfo` record -> MySQL
    * sink (via the custom `MysqlWriter` ForeachWriter). Blocks the driver
    * until the streaming query is terminated externally.
    */
  def main(args: Array[String]): Unit = {
    // Local-mode session using all available cores.
    val session: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName("SparkSQLWC")
      .getOrCreate()
    session.sparkContext.setLogLevel("WARN")

    import session.implicits._

    // Subscribe to the Kafka topic; "latest" means only messages that
    // arrive after the query starts are consumed.
    val rawFrame = session.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "linux121:9092,linux122:9092")
      .option("subscribe", "topic1")
      .option("startingOffsets", "latest")
      .load()

    // Kafka delivers `value` as binary; cast it to a string column first.
    val lines: Dataset[String] =
      rawFrame.selectExpr("CAST(value AS STRING)").as[String]

    // Keep only well-formed records (exactly 15 comma-separated fields),
    // then parse each one into a BusInfo.
    val busRecords: Dataset[BusInfo] = lines
      .filter(line => line.split(",").length == 15)
      .map(line => BusInfo(line))

    // Append each new record to MySQL and block until termination.
    busRecords.writeStream
      .outputMode("append")
      .foreach(new MysqlWriter)
      .start()
      .awaitTermination()
  }
}
