package org.wj.streaming

import org.apache.spark.SparkContext
import org.apache.spark.sql.streaming.{StreamingQuery, Trigger}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.wj.accumulator.User

/**
 * Structured Streaming demo: watches a directory for CSV files of users,
 * registers them as a temp view, selects them back as a typed `Dataset[User]`,
 * and echoes each micro-batch to the console.
 *
 * Uses an explicit `main` instead of `extends App`: App's DelayedInit-based
 * initialization is a known Spark pitfall (object fields may still be null
 * when task closures are serialized).
 */
object UserTest {

  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .master("local")
      .appName(this.getClass.getName)
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")

    import spark.implicits._

    // Streaming file sources require an explicit schema (no inference by default).
    val userSchema: StructType = StructType(List(
      StructField("name", StringType, nullable = false),
      StructField("sex", StringType, nullable = false),
      StructField("age", IntegerType, nullable = false)
    ))

    // Input directory: first CLI argument, falling back to the original demo path.
    val inputPath: String = args.headOption
      .getOrElse("P:\\Project\\Idea\\bigdata\\spark\\src\\main\\resources\\streaming")

    // FIX: maxFilesPerTrigger is a *source* option — it must be set on readStream.
    // The original set it on writeStream, where the file source silently ignored it.
    val frame: DataFrame = spark.readStream
      .format("csv")
      .schema(userSchema)
      .option("maxFilesPerTrigger", 1)
      .load(inputPath)

    // createOrReplaceTempView is idempotent; createTempView throws if the view exists.
    frame.createOrReplaceTempView("t_user")

    val userDataSet: Dataset[User] = spark.sql("select * from t_user").as[User]

    // ProcessingTime(0): fire each micro-batch as soon as the previous one finishes.
    val query: StreamingQuery = userDataSet.writeStream
      .outputMode("append")
      .trigger(Trigger.ProcessingTime(0))
      .format("console")
      .start()

    // Block the driver until the query is stopped or fails.
    query.awaitTermination()
  }
}
