package com.doit.sparksql.day01

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Spark SQL word-count example.
 *
 * @author MDK
 * @since 2022-01-13 09:37
 * @version 2021.2.2
 */
object SQL02 {
  // Silence Spark's verbose INFO/WARN logging; only errors are printed.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Word count with Spark SQL.
   *
   * Reads a plain-text file (each line becomes one row in a single string
   * column named `value`), registers it as a temp view, splits every line
   * on whitespace with `explode(split(...))`, and counts each word.
   *
   * @param args unused
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]") // run locally on all available cores
      .appName("wordcount")
      .getOrCreate()

    // `read.text` produces a DataFrame with exactly one column: value STRING.
    val df: DataFrame = spark.read.text("data/word/word.txt")
    df.createTempView("t1")

    // For reference, `select * from t1` shows the raw lines, e.g.:
    //   +--------------------+
    //   |               value|
    //   +--------------------+
    //   |      hello ABC  com|
    //   |   hello ABC jim com|
    //   |hello scala spark...|
    //   |hello hive hbase ...|
    //   |  hello R hive scala|
    //   +--------------------+

    // Split each line on one-or-more whitespace chars ('\\s+', so repeated
    // spaces don't create empty words), explode into one row per word,
    // then group and count.
    spark.sql(
      """
        |select
        |word,
        |count(1) as cnt
        |from
        |(
        |select
        |explode(split(value, '\\s+')) as word
        |from
        |t1
        |)t
        |group by
        |word
        |""".stripMargin).show()

    spark.close()
  }
}
