package com.xbai.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, SparkSession}

/**
  * Each order may contain multiple items, each order can produce multiple
  * transactions, and different items have different unit prices.
  * （每个订单可能包含多个货品，每个订单可以产生多次交易，不同的货品有不同的单价。）
  *
  * @author xbai
  * @since 2021/1/7
  */
object SparkSQL_Practice {

  /**
    * Loads the date / stock / stock-detail tables from text files, registers
    * them as temp views, and runs three yearly sales reports.
    *
    * @param args optional; args(0) overrides the input directory (defaults to "in")
    */
  def main(args: Array[String]): Unit = {
    // Allow the input directory to be supplied on the command line;
    // the default preserves the original hard-coded "in/" behavior.
    val inputDir: String = if (args.nonEmpty) args(0) else "in"

    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("practice")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    try {
      import spark.implicits._

      // date view: one row per calendar day with year/month/quarter breakdown.
      val tbDateDS: Dataset[TbDate] = spark.sparkContext
        .textFile(s"$inputDir/tbDate.txt")
        .map(_.split(","))
        .map(a => TbDate(a(0), a(1).toInt, a(2).toInt, a(3).toInt, a(4).toInt,
          a(5).toInt, a(6).toInt, a(7).toInt, a(8).toInt, a(9).toInt))
        .toDS()
      tbDateDS.show()
      tbDateDS.createOrReplaceTempView("date")

      // stock view: one row per order (orderNumber, locationId, dateId).
      val tbStockDS: Dataset[TbStock] = spark.sparkContext
        .textFile(s"$inputDir/tbStock.txt")
        .map(_.split(","))
        .map(a => TbStock(a(0), a(1), a(2)))
        .toDS()
      tbStockDS.show()
      tbStockDS.createOrReplaceTempView("stock")

      // detail view: one row per order line (item, quantity, price, amount).
      val tbStockDetailDS: Dataset[TbStockDetail] = spark.sparkContext
        .textFile(s"$inputDir/tbStockDetail.txt")
        .map(_.split(","))
        .map(a => TbStockDetail(a(0), a(1).toInt, a(2), a(3).toInt, a(4).toDouble, a(5).toDouble))
        .toDS()
      tbStockDetailDS.show()
      tbStockDetailDS.createOrReplaceTempView("detail")

      // 1. 计算所有订单中每年的销售单数、销售总额
      //    Order count and total sales amount per year.
      val sql1 =
        """select t2.theYear,
          |       count(distinct t1.orderNumber) as orderCount,
          |       sum(t3.amount)                 as totalAmount
          |from stock t1
          |join date t2 on t1.dateId = t2.dateId
          |join detail t3 on t1.orderNumber = t3.orderNumber
          |group by t2.theYear""".stripMargin
      spark.sql(sql1).show()

      // 2. 计算所有订单每年最大金额订单的销售额
      //    Largest single-order amount per year.
      val sql2: String =
        """select t4.theYear, max(t4.sumAmount) as maxOrderAmount
          |from (select t3.theYear, t1.orderNumber, sum(t2.amount) as sumAmount
          |      from stock t1
          |      join detail t2 on t1.orderNumber = t2.orderNumber
          |      join date t3 on t1.dateId = t3.dateId
          |      group by t3.theYear, t1.orderNumber) t4
          |group by t4.theYear""".stripMargin
      spark.sql(sql2).show()

      // 3. 计算所有订单中每年最畅销货品
      //    Best-selling item per year. The original query returned only the
      //    max amount and dropped the itemId, so it never identified WHICH
      //    item sold best; a rank() window keeps the item alongside its total
      //    (ties all get rank 1 and are all reported).
      val sql3: String =
        """select theYear, itemId, sumAmount
          |from (select t5.theYear, t5.itemId, t5.sumAmount,
          |             rank() over (partition by t5.theYear order by t5.sumAmount desc) as rk
          |      from (select t3.theYear, t2.itemId, sum(t2.amount) as sumAmount
          |            from stock t1
          |            join detail t2 on t1.orderNumber = t2.orderNumber
          |            join date t3 on t1.dateId = t3.dateId
          |            group by t3.theYear, t2.itemId) t5) t6
          |where rk = 1""".stripMargin
      spark.sql(sql3).show()
    } finally {
      // Always release the SparkSession, even if a query throws.
      spark.stop()
    }
  }
}

case class TbStock(orderNumber: String, locationId: String, dateId: String) extends Serializable

case class TbStockDetail(orderNumber: String, rowNum: Int, itemId: String, number: Int, price: Double, amount: Double) extends Serializable

case class TbDate(dateId: String, years: Int, theYear: Int, month: Int, day: Int, weekday: Int, week: Int, quarter: Int, period: Int, halfMonth: Int) extends Serializable

