package dataframe

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * Word-count demo implemented twice: once with the typed Dataset DSL and
 * once with a Spark SQL query over a temp view. Reads whitespace-separated
 * words from `data/words.txt` and prints the top 3 most frequent words.
 */
object DataFrame_SQLDemo_WordCount {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local[*]")
    conf.setAppName("DataFrame_SQLDemo_WordCount")

    val spark: SparkSession = SparkSession
      .builder()
      .config(conf)
      .getOrCreate()

    import spark.implicits._

    // Ensure the session is released even if a stage below throws.
    try {
      // --- Word count via the Dataset DSL API ---
      val lineDS: Dataset[String] = spark.read.textFile("data/words.txt")
      // Split each line on runs of whitespace; one element per word.
      val wordDS: Dataset[String] =
        lineDS
          .flatMap(line => line.split("\\s+"))

      wordDS
        .filter(_.nonEmpty)                      // drop empty tokens from leading/trailing whitespace
        .groupByKey(word => word.toLowerCase())  // case-insensitive grouping
        .count()
        .toDF("word", "count")
        .orderBy($"count".desc)
        .show(3)

      // --- Word count via a SQL statement over a temp view ---
      val wordDF: DataFrame = spark
        .read
        .textFile("data/words.txt")
        .flatMap(line => line.split("\\s+"))
        .filter(_.nonEmpty)
        .toDF("word")
      wordDF.createOrReplaceTempView("tb_word")
      spark.sql(
        """
          |select word,count(word) as count
          |from tb_word
          |group by word
          |order by count desc
          |""".stripMargin)
        .show(3)
    } finally {
      spark.stop()
    }
  }
}
