package zy.learn.demo.structuredstreaming.basic

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.streaming.Trigger

/**
 * Start this application first, then immediately run `nc64 -l -p 9999` in a cmd window.
 * Once the application is up, type words into the cmd window to feed the stream.
 *
 * Reads lines from a local socket, splits them into words, and maintains a running
 * word count, printing updated counts to the console every 2 seconds.
 */
object WordCount2 {
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging so console sink output stays readable.
    Logger.getLogger("org").setLevel(Level.WARN)
    // Lower shuffle partitions from the default 200: this is a tiny local demo,
    // and fewer partitions means faster micro-batches.
    val sparkConf = new SparkConf().set("spark.sql.shuffle.partitions", "3")

    val spark = SparkSession.builder()
      .master("local[2]")
      .config(sparkConf)
      .appName("WordCount2") // keep consistent with the object name
      .getOrCreate()

    // 1. Load streaming data from the socket source (one row per input line).
    val lines = spark.readStream
      .format("socket")
      .option("host", "localhost")
      .option("port", 9999)
      .load()

    import spark.implicits._

    // 2. Aggregate: split each line into words and count occurrences per word.
    val wordCount = lines.as[String].flatMap(_.split(" "))
      .groupBy("value").count()

    // 3. Output: print only updated rows ("update" mode) to the console,
    //    triggering a micro-batch every 2 seconds.
    val query = wordCount.writeStream
      .format("console")
      .outputMode("update")
      .trigger(Trigger.ProcessingTime("2 seconds"))
      .start()

    // Block the main thread until the streaming query is stopped or fails.
    query.awaitTermination()
  }
}
