package com.sunzm.spark.sql.exercise

import org.apache.spark.sql.{DataFrame, SparkSession}

object SparkSQLExercise {

  def main(args: Array[String]): Unit = {
    //Logger.getLogger("org").setLevel(Level.WARN)

    // Local SparkSession for the exercise; parallelism kept small for a laptop run.
    // stripSuffix("$") removes the trailing '$' Scala appends to object class names.
    val spark: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getSimpleName.stripSuffix("$"))
      .master("local[*]")
      .config("spark.default.parallelism", 8)
      .config("spark.sql.shuffle.partitions", 8)
      .getOrCreate()

    // Basic query demo.
    basicQuery(spark)

    spark.close()
  }

  /**
   * Demonstrates basic Spark SQL usage: reads order and province JSON files,
   * registers them as temp views, aggregates orders per province, joins the
   * result with province names, and prints it to stdout.
   *
   * @param spark the active SparkSession used to read data and run SQL
   */
  def basicQuery(spark: SparkSession): Unit = {
    // Order data (one JSON object per line, schema inferred).
    val orderDF: DataFrame = spark.read.json("data/spark/sql/order.txt")
    // Print the inferred schema for inspection.
    orderDF.printSchema()
    // Register as a temporary view so it can be queried with SQL.
    orderDF.createOrReplaceTempView("v_order")

    // Per-province order count (distinct order ids) and total amount,
    // kept as intermediate view "v1" for the subsequent join.
    spark.sql(
      """
        |SELECT provId, COUNT(DISTINCT orderId) AS orderCount, SUM(price) AS totalPrice
        | FROM v_order
        | GROUP BY provId
        |""".stripMargin)
      .createOrReplaceTempView("v1")

    // Province reference data (provId -> provName).
    val provDF: DataFrame = spark.read.json("data/spark/sql/prov.txt")
    provDF.createOrReplaceTempView("v_prov")

    // Join the aggregates with the province names and display the result.
    spark.sql(
      """
        |SELECT v2.provName, v1.orderCount, v1.totalPrice
        | FROM v1 JOIN v_prov v2
        | ON v1.provId = v2.provId
        |""".stripMargin)
      .show()
  }

}
