package com.niit.DIM

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

import scala.util.control.NonFatal

object DimCityInfo {

  /**
   * Canonical mapping from the raw English `region` value in ods_city_info
   * to its (region_code, region_name) pair.
   *
   * Kept in a single place so the validation check and the two derived
   * columns can never drift apart (the original duplicated this list three
   * times). Values absent from this map fall through to "OTHER" / "其他".
   */
  private val RegionMapping: Map[String, (String, String)] = Map(
    "China North"  -> ("CN-North", "华北"),
    "China East"   -> ("CN-East", "华东"),
    "China South"  -> ("CN-South", "华南"),
    "China Middle" -> ("CN-Middle", "华中"),
    "West North"   -> ("CN-West-North", "西北"),
    "West South"   -> ("CN-West-South", "西南"),
    "East North"   -> ("CN-East-North", "东北")
  )

  /**
   * Entry point: reads ods_city_info from the `commerces` Hive database,
   * standardises the region into a code and a Chinese name, and fully
   * overwrites the external DIM table dim_city_info.
   *
   * Exits with status 1 on failure — but only after the SparkSession has
   * been stopped.
   */
  def main(args: Array[String]): Unit = {
    // Reduce framework log noise.
    Logger.getLogger("org").setLevel(Level.WARN)
    Logger.getLogger("akka").setLevel(Level.WARN)

    // 1. Initialise the SparkSession with Hive support enabled.
    val spark = SparkSession.builder()
      .appName("ODS to DIM: dim_city_info")
      .master("local[*]")
      .config("spark.testing.memory", "512000000") // give the local run enough memory
      .enableHiveSupport()
      .getOrCreate()

    println("SparkSession初始化完成，开始处理城市维度数据...")

    // Track failure so the non-zero exit happens AFTER the finally block.
    // The original called System.exit(1) inside the catch, which starts JVM
    // shutdown immediately and skips finally — leaking the SparkSession.
    var failed = false

    try {
      // 2. Read the raw city table from the ODS layer (ods_city_info).
      spark.sql("USE commerces")
      println("正在读取ODS层城市数据...")

      // Fail fast with a clear message if the source table is missing.
      val tables = spark.catalog.listTables("commerces").collect()
      if (!tables.exists(_.name == "ods_city_info")) {
        throw new RuntimeException("源表ods_city_info不存在，请检查ODS层数据")
      }

      val odsCityDF = spark.sql(
        """
          |SELECT
          |  city_id,       -- 城市ID
          |  city_name,     -- 城市名称
          |  region         -- 原始区域（英文）
          |FROM ods_city_info
          |WHERE city_id IS NOT NULL AND city_name IS NOT NULL AND region IS NOT NULL -- 过滤空值
        """.stripMargin)
        // Reused by count/show, the validation query and the transform below;
        // caching avoids re-reading the Hive table for each action.
        .cache()

      println(s"读取到${odsCityDF.count()}条城市数据")
      odsCityDF.show(5, false)

      // 3. Data cleansing and transformation.
      println("开始数据清洗与转换...")

      // Validation: surface any region values not covered by RegionMapping;
      // they will be bucketed as OTHER below.
      val unmappedRegions = odsCityDF
        .filter(!col("region").isin(RegionMapping.keys.toSeq: _*))
        .select("region")
        .distinct()
        .collect()
        .map(_.getString(0))

      if (unmappedRegions.nonEmpty) {
        println(s"警告：发现未映射的区域值：${unmappedRegions.mkString(", ")}")
        println("这些值将被映射为OTHER")
      }

      // Build both CASE-WHEN chains from the single mapping table. The map's
      // keys are disjoint, so the order of the WHEN branches is irrelevant.
      val regionCodeCol = RegionMapping.foldLeft(lit("OTHER")) {
        case (acc, (raw, (code, _))) => when(col("region") === raw, code).otherwise(acc)
      }
      val regionNameCol = RegionMapping.foldLeft(lit("其他")) {
        case (acc, (raw, (_, name))) => when(col("region") === raw, name).otherwise(acc)
      }

      val dimCityDF = odsCityDF
        .withColumn("region_code", regionCodeCol)        // standardised region code
        .withColumn("region_name", regionNameCol)        // Chinese region name
        .withColumn("is_valid", lit(1))                  // validity flag (1 = valid)
        .withColumn("create_time", current_timestamp())  // load timestamp
        .withColumn("update_time", current_timestamp())
        // Column ORDER matters: insertInto below resolves by position.
        .select(
          col("city_id"),
          col("city_name"),
          col("region_code"),
          col("region_name"),
          col("is_valid"),
          col("create_time"),
          col("update_time")
        )

      println("数据转换完成，处理后的数据样例：")
      dimCityDF.show(5, false)

      // 4. Create the DIM table if it does not exist. The LOCATION clause
      // fixes where the data lives — the writer must not pass its own path.
      println("检查并创建DIM层表...")
      spark.sql(
        """
          |CREATE EXTERNAL TABLE IF NOT EXISTS dim_city_info (
          |  city_id INT COMMENT '城市ID',
          |  city_name STRING COMMENT '城市名称',
          |  region_code STRING COMMENT '区域编码',
          |  region_name STRING COMMENT '区域名称',
          |  is_valid INT COMMENT '是否有效',
          |  create_time TIMESTAMP COMMENT '创建时间',
          |  update_time TIMESTAMP COMMENT '更新时间'
          |)
          |STORED AS PARQUET
          |LOCATION 'hdfs://192.168.10.130:9000/warehouse/commerces/dim/dim_city_info'
        """.stripMargin)

      // 5. Full overwrite into the DIM table. NOTE: the original also set
      // .option("path", ...), which insertInto rejects (AnalysisException in
      // Spark 2.4+); the table's LOCATION already determines the destination.
      println("开始写入DIM层表...")
      dimCityDF.write
        .mode("overwrite")
        .insertInto("dim_city_info")

      println("数据写入完成！")

      // 6. Verify the write by counting the target table.
      val resultCount = spark.sql("SELECT COUNT(*) FROM dim_city_info").head().getLong(0)
      println(s"DIM层dim_city_info表共有${resultCount}条记录")

    } catch {
      // NonFatal lets OutOfMemoryError / InterruptedException etc. propagate.
      case NonFatal(e) =>
        println(s"处理过程中发生错误: ${e.getMessage}")
        e.printStackTrace()
        failed = true
    } finally {
      // Always release the session, on success and on failure alike.
      spark.stop()
      println("SparkSession已关闭")
    }

    // Exit with a non-zero status only after cleanup has completed.
    if (failed) System.exit(1)
  }
}