package com.liyueheng.app.dataWarehouseDetail.ranking

import com.liyueheng.util.{ConfigLoader, SparkConf}
import org.apache.spark.sql.functions._

/**
 * Batch job: ranks regions by total purchase amount and keeps the top 10.
 *
 * Reads `sku`, `user` and `user_act` from the DWD layer, joins purchase
 * actions (`act_type = 2`) to users and SKUs, derives a coarse region name
 * from the user's address prefix, sums prices per region, and writes the
 * top-10 result to `<dws>.detail_top10_region_spenders` (overwrite).
 */
object Top10RegionSpenders {

  /**
   * Runs the top-10-region-spenders aggregation and persists it to Hive.
   *
   * Side effects: creates a SparkSession, overwrites the DWS output table,
   * prints the result, and stops the session. No return value.
   */
  def stat(): Unit = {
    println("------------------ 分析地区消费前十名 -----------------")
    val spark = SparkConf.createSparkSession("Top10RegionSpenders")
    val dwd = ConfigLoader.getString("databases.dwd")
    val dws = ConfigLoader.getString("databases.dws")

    // The region is derived once in a subquery so the CASE expression is not
    // duplicated between SELECT and GROUP BY (the original repeated it, which
    // risks the two copies drifting apart). Explicit JOIN ... ON replaces the
    // implicit comma joins; the predicates are unchanged, so this is the same
    // inner join.
    //
    // Region extraction: the address prefix up to and including the first
    // '区' / '省' / '市'. Note that for '自治区' the first '区' of the address
    // is assumed to be the one inside '自治区' itself.
    // NOTE(review): addresses matching none of the three patterns fall into a
    // NULL region bucket that can still appear in the top 10 — confirm whether
    // such rows should be filtered out.
    val result = spark.sql(
      s"""
        SELECT ROUND(SUM(price), 2) AS `spender_sum`, `region`
        FROM (
          SELECT price,
            CASE
              WHEN INSTR(address, '自治区') != 0 THEN SUBSTR(address, 1, INSTR(address, '区'))
              WHEN INSTR(address, '省') != 0 THEN SUBSTR(address, 1, INSTR(address, '省'))
              WHEN INSTR(address, '市') != 0 THEN SUBSTR(address, 1, INSTR(address, '市'))
            END AS `region`
          FROM $dwd.sku
          JOIN $dwd.user_act ON sku.sku_id = user_act.sku
          JOIN $dwd.user ON user_act.user = user.id
          WHERE act_type = 2
        )
        GROUP BY `region`
        ORDER BY `spender_sum` DESC
        LIMIT 10""")
    result.show()
    result.write.mode("overwrite").option("encoding", "UTF-8").format("hive").saveAsTable(s"$dws.detail_top10_region_spenders")
    SparkConf.stopSparkSession(spark)
  }

  /** CLI entry point: delegates to [[stat]]. */
  def main(args: Array[String]): Unit = {
    stat()
  }
}
