package com.shujia.sql

import org.apache.spark.sql.functions.count
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Word-count example implemented twice over the same input:
 * once with the DataFrame DSL and once with Spark SQL, so the two
 * result sets can be compared side by side on the console.
 *
 * Input: `spark_code/data/word`, read as a single-column CSV
 * (`word String`) with `|` as the field separator; each row's value
 * is itself a comma-separated list of words.
 */
object Code07WordCount {
  def main(args: Array[String]): Unit = {
    // GetSpark is a project helper that builds (or reuses) a SparkSession.
    val spark: SparkSession = GetSpark.spark("wordCount")

    // Read the raw lines; schema declares one string column named "word".
    val dataFrame: DataFrame = spark
      .read
      .format("csv")
      .option("sep", "|")
      .schema("word String")
      .load("spark_code/data/word")

    import spark.implicits._
    import org.apache.spark.sql.functions.split
    import org.apache.spark.sql.functions.explode

    // --- DSL version ---
    // split each line on ',' then explode so every word becomes its own row,
    // group by word and count occurrences.
    dataFrame
      .select(explode(split($"word", ",")) as "words")
      .groupBy("words")
      .agg(count("*") as "cnt")
      .show()

    // --- SQL version ---

    // Register a temp view. Use createOrReplaceTempView (not createTempView)
    // so re-running against a shared SparkSession does not throw
    // AnalysisException("Temporary view 'word_tbl' already exists").
    dataFrame
      .createOrReplaceTempView("word_tbl")

    // Same word count expressed in SQL over the temp view.
    val resDataFrame: DataFrame = spark.sql(
      """
        |
        |SELECT
        |words
        |,count(*) as cnt
        |FROM (
        |SELECT
        | explode(split(word,',')) as words
        |FROM word_tbl) t1
        |group by words
        |""".stripMargin)

    resDataFrame.show()


  }
}
