package com.liyueheng.app.dataWarehouseDetail.portrait

import com.liyueheng.util.{ConfigLoader, SaveAsTable, SparkConf}
import org.apache.spark.sql.functions._

object RegionPortrait {

  /** Builds the region-distribution portrait.
    *
    * Reads users from the configured DWD database, derives a province-level
    * region name from the free-text `address` column, counts users per region,
    * and overwrites the result into `<dws>.detail_region_portrait`.
    *
    * Side effects: creates a SparkSession, drops/recreates the target Hive
    * table, and stops the session when done.
    */
  def analyzeRegion(): Unit = {
    println("------------------ 分析地域分布 -----------------")
    val spark = SparkConf.createSparkSession("RegionPortrait")
    val dwd = ConfigLoader.getString("databases.dwd")
    val dws = ConfigLoader.getString("databases.dws")

    // Derive the region name once in a subquery so the CASE expression is not
    // duplicated between SELECT and GROUP BY. '自治区' is tested before
    // '省'/'市' so autonomous regions keep their full prefix (the inner
    // INSTR(address, '区') then lands on the '区' inside '自治区').
    // NOTE: source database now comes from config ($dwd) instead of the
    // previously hardcoded "gmall_dwd".
    val df = spark.sql(
        s"""
           |SELECT COUNT(id) AS value, name
           |FROM (
           |  SELECT id,
           |         CASE
           |           WHEN INSTR(address, '自治区') != 0 THEN SUBSTR(address, 1, INSTR(address, '区'))
           |           WHEN INSTR(address, '省') != 0 THEN SUBSTR(address, 1, INSTR(address, '省'))
           |           WHEN INSTR(address, '市') != 0 THEN SUBSTR(address, 1, INSTR(address, '市'))
           |           ELSE '未知'
           |         END AS name
           |  FROM $dwd.user
           |) t
           |GROUP BY name
         """.stripMargin)

    df.show()

    // Drop first so schema changes are picked up, then write with UTF-8
    // encoding so Chinese region names round-trip through Hive correctly.
    spark.sql(s"DROP TABLE IF EXISTS $dws.detail_region_portrait")
    df.write
      .mode("overwrite")
      .format("hive")
      .option("encoding", "UTF-8")
      .saveAsTable(s"$dws.detail_region_portrait")

    SparkConf.stopSparkSession(spark)
  }

  def main(args: Array[String]): Unit =
    analyzeRegion()
}
