package com.shujia.sql

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Word-count demo implemented twice over the same DataFrame:
 * once with the DSL (`explode`/`split`/`groupBy`) and once with Spark SQL
 * against a temporary view. Results are printed via `show()`.
 */
object Demo04WordCount {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getSimpleName.replace("$", ""))
      .master("local")
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // The file's words are comma-separated within each line; using "|" as the
    // CSV separator (which does not occur in the data) keeps each full line in
    // a single `line` column so we can split it ourselves.
    val wordsDF: DataFrame = spark.read.format("csv").option("sep", "|").schema("line String")
      .load("spark/data/words/words.txt")

    try {
      // Count word occurrences in words.txt — DSL style.
      wordsDF
        .select(explode(split($"line", ",")) as "word")
        .groupBy($"word")
        .agg(count("*") as "cnt")
        .show()

      // Register the DataFrame as a temporary view, then do the same count in SQL.
      wordsDF.createOrReplaceTempView("words_tb")
      spark.sql(
        """
          |select  t1.word
          |        ,count(*) as cnt
          |from (
          |    select explode(split(line,",")) as word
          |    from words_tb
          |) t1 group by t1.word
          |""".stripMargin).show()
    } finally {
      // Release the SparkContext and associated resources; the original demo
      // leaked the session by never stopping it.
      spark.stop()
    }
  }

}
