package com.shujia.stream

import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.{DataFrame, SparkSession}

object Demo4StructuredStreaming {

  /**
    * Structured Streaming demo: a continuously-updated word (line) count.
    *
    * Reads lines from a TCP socket, keeps a running count per distinct
    * line, and prints updated counts to the console on every micro-batch.
    */
  def main(args: Array[String]): Unit = {

    // Local session on 2 cores. Shuffle partitions are dropped to 1 so the
    // console output of this small demo stays compact and readable.
    val spark: SparkSession = SparkSession
      .builder()
      .appName("ssc")
      .master("local[2]")
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()

    import org.apache.spark.sql.functions._
    import spark.implicits._

    /**
      * Source: an unbounded stream of lines from a TCP socket.
      * Each received line becomes one row in a single string column "value".
      */
    val lines: DataFrame = spark.readStream
      .format("socket")
      .option("host", "192.168.129.101")
      .option("port", 8888)
      .load()

    // Streaming aggregations are stateful by default: Spark maintains the
    // running count for every distinct line across micro-batches.
    val counts: DataFrame = lines
      .groupBy($"value")
      .agg(count($"value").as("c"))

    // Sink: console. OutputMode.Update emits only the rows whose aggregate
    // value changed during the current micro-batch.
    val query = counts.writeStream
      .format("console")
      .outputMode(OutputMode.Update())
      .start() // launch the streaming query

    query.awaitTermination() // block until the query is stopped

  }

}
