package com.app.structure

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}



// NOTE(review): object name keeps the original typo ("Stram" vs "Stream") to avoid
// breaking external references; rename in a coordinated change if possible.
object StructureStram {

  /**
   * Streaming word count over a Kafka topic.
   *
   * Reads records from the `zt.hiscene.ga` topic, splits each message value on
   * spaces, maintains a running count per word, and prints the full result
   * table to the console after every trigger (`complete` output mode).
   *
   * Blocks forever in `awaitTermination()` until the query fails or is stopped.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder
      .appName("worldcont")
      .master("local[*]")
      .getOrCreate()

    // Must come AFTER `spark` is defined: importing from a val that is declared
    // later in the same block is an illegal forward reference and fails to compile.
    import spark.implicits._
    import org.apache.spark.sql.streaming.StreamingQuery

    // Use readStream (not read): `.writeStream` below is only legal on a
    // streaming DataFrame; a batch `read` would throw an AnalysisException.
    val df: DataFrame = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "host1:port1,host2:port2")
      .option("subscribe", "zt.hiscene.ga")
      .load()

    // Kafka exposes key/value as binary; we only need the message payload.
    // Selecting a single column lets `.as[String]` succeed — the original
    // selected (key, value) and then cast the pair to String, which fails analysis.
    val lines: Dataset[String] = df
      .selectExpr("CAST(value AS STRING)")
      .as[String]

    // Tokenize on single spaces. NOTE(review): consecutive spaces yield empty
    // tokens that are counted as words; use a regex split if that matters.
    val words: Dataset[String] = lines.flatMap(_.split(" "))

    // The Dataset[String] column is named "value" by convention, so groupBy("value")
    // groups by the word itself.
    val wordCounts: DataFrame = words.groupBy("value").count()

    // `complete` mode re-emits the entire aggregate table each trigger, which is
    // required for a global aggregation without watermarking.
    val query: StreamingQuery = wordCounts.writeStream
      .outputMode("complete")
      .format("console")
      .start()

    query.awaitTermination()
  }
}
