package com.shengzai.sql

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * Word-count batch job implemented with Spark SQL.
 *
 * Reads `data/worlds.txt` as a single-column text file, splits each line on
 * spaces, counts occurrences of each token via a SQL aggregation, and writes
 * the result as CSV to `data/worldCount2` (overwriting any previous output).
 *
 * NOTE(review): "world"/"World" throughout looks like a typo for "word"/"Word";
 * identifiers and paths are kept as-is to preserve the external interface and
 * on-disk locations — rename in a coordinated change if desired.
 */
object Demo2WorldCountSQL {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      // Tiny local dataset: one shuffle partition avoids many empty output files.
      .config("spark.sql.shuffle.partitions", 1)
      .appName("worldCount")
      .master("local")
      .getOrCreate()

    try {
      // Read each line of the input file into a single STRING column named `line`.
      // "|" is used as the separator so whole lines (containing spaces) are not split.
      val worldDF: DataFrame = spark.read
        .format("csv")
        .option("sep", "|")
        .schema("line STRING")
        .load("data/worlds.txt")

      worldDF.createOrReplaceTempView("tmp")

      // explode(split(...)) turns each line into one row per token,
      // then group/count aggregates token frequencies.
      val sql: String =
        """
          |select
          |world
          |,count(*) as num
          |from(
          |select
          |explode(split(line," ")) as world
          |from
          |tmp) t1
          |group by world
          |""".stripMargin

      val resDF: DataFrame = spark.sql(sql)

      // Overwrite so the job is re-runnable without manual cleanup.
      resDF.write
        .format("csv")
        .option("sep", ",")
        .mode(SaveMode.Overwrite)
        .save("data/worldCount2")
    } finally {
      // Fix: the session was never stopped, leaking the SparkContext and its
      // resources (threads, UI port, temp dirs). Always release it on exit.
      spark.stop()
    }
  }

}
