package com.niit.DWD

import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.functions._
import org.apache.log4j.{Level, Logger}
import java.text.SimpleDateFormat
import java.util.Date

/**
 * ODS → DWD ETL: extracts click events for one partition date from
 * `ods_user_visit_action`, enriches them with region names from
 * `dim_city_info`, and loads the result into the partitioned Hive table
 * `dwd_user_click` (overwriting only the target `dt` partition).
 *
 * Usage: first CLI argument is the partition date (yyyy-MM-dd);
 * defaults to today's date when absent. Exits with status 1 on failure.
 */
object DwdUserClick {
  def main(args: Array[String]): Unit = {
    import scala.util.control.NonFatal

    // Quiet down framework logging so job output is readable.
    Logger.getLogger("org").setLevel(Level.WARN)
    Logger.getLogger("akka").setLevel(Level.WARN)

    // Partition date to process: args(0) if supplied, else today.
    val dt = if (args.length > 0) args(0) else {
      new SimpleDateFormat("yyyy-MM-dd").format(new Date)
    }

    // 1. Initialize SparkSession with an explicit Hive metastore.
    //    partitionOverwriteMode=dynamic makes SaveMode.Overwrite + insertInto
    //    replace ONLY the partitions present in the DataFrame (i.e. this dt),
    //    instead of wiping every partition of dwd_user_click on each run.
    val spark = SparkSession.builder()
      .appName("ODS to DWD: dwd_user_click")
      .master("local[*]")
      .config("spark.testing.memory", "512000000")
      .config("hive.metastore.uris", "thrift://192.168.10.130:9083")
      .config("hive.exec.dynamic.partition", "true")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.sql.sources.partitionOverwriteMode", "dynamic")
      .enableHiveSupport()
      .getOrCreate()

    // Tracks failure so we can stop Spark cleanly in `finally` BEFORE the
    // process exits. (Calling System.exit inside `catch` would terminate the
    // JVM without ever running `finally`, leaking the SparkSession.)
    var failed = false
    try {
      // 2. Read ODS click events for the target date. Rows with a null or
      //    sentinel (-1) category id are not click actions and are dropped.
      spark.sql("USE commerces")

      val odsUserActionDF = spark.sql(
        s"""
           |SELECT
           |  date, user_id, session_id, page_id, action_time,
           |  search_keyword, click_category_id, click_product_id, city_id
           |FROM ods_user_visit_action
           |WHERE date = '$dt' AND click_category_id IS NOT NULL AND click_category_id != -1
        """.stripMargin)

      // 3. City dimension, valid rows only, for region enrichment.
      val dimCityDF = spark.sql(
        """
          |SELECT city_id, region_name
          |FROM dim_city_info
          |WHERE is_valid = 1
        """.stripMargin)

      // 4. Enrich with region_name (left join keeps clicks whose city is
      //    missing from the dimension) and order columns to match the target
      //    table schema — insertInto resolves columns by POSITION, not name.
      //    Persist: this DataFrame feeds both the quality check below and the
      //    write, so caching avoids recomputing the join for each action.
      val userClickWithCityDF = odsUserActionDF
        .join(dimCityDF, Seq("city_id"), "left")
        .select(
          col("user_id"), col("session_id"), col("page_id"),
          col("action_time"), col("search_keyword"),
          col("click_category_id"), col("click_product_id"),
          col("city_id"), col("region_name"), col("date").alias("dt")
        )
        .persist()

      // Data-quality check: clicks whose city_id has no valid dimension row.
      // Computed once and reused for both the count and the diagnostic show.
      val invalidCityDF = userClickWithCityDF.filter(col("region_name").isNull)
      val invalidCityCount = invalidCityDF.count()
      if (invalidCityCount > 0) {
        println(s"警告：发现 $invalidCityCount 条记录无法关联到有效的城市维度，city_id 列表如下：")
        invalidCityDF.select("city_id").distinct().show(false)
      }

      // 5. Ensure the DWD target table exists (external, parquet, dt-partitioned).
      spark.sql(
        """
          |CREATE EXTERNAL TABLE IF NOT EXISTS dwd_user_click (
          |  user_id BIGINT, session_id STRING, page_id INT,
          |  action_time STRING, search_keyword STRING,
          |  click_category_id INT, click_product_id INT,
          |  city_id INT, region_name STRING
          |)
          |PARTITIONED BY (dt STRING)
          |STORED AS PARQUET
          |LOCATION 'hdfs://192.168.10.130:9000/training/hive/warehouse/commerces/dwd/dwd_user_click'
        """.stripMargin)

      // 6. Write into the Hive table. insertInto always uses the metastore
      //    table location, so no path option is given (supplying one is
      //    ignored at best and an AnalysisException in some Spark versions).
      //    With dynamic partition overwrite, only the dt partition is replaced.
      userClickWithCityDF.write
        .mode(SaveMode.Overwrite)
        .insertInto("dwd_user_click")

      userClickWithCityDF.unpersist()

      // 7. Verify the load by counting rows in the freshly written partition.
      val resultCount = spark.sql(s"SELECT COUNT(*) FROM dwd_user_click WHERE dt = '$dt'").head().getLong(0)
      println(s"成功写入$dt，共${resultCount}条记录")

    } catch {
      // NonFatal: let truly fatal errors (OOM, thread death) propagate.
      case NonFatal(e) =>
        println(s"处理失败: ${e.getMessage}")
        e.printStackTrace()
        failed = true
    } finally {
      spark.stop()
    }
    // Exit AFTER Spark has been stopped, preserving the non-zero exit code
    // the original intended for failures.
    if (failed) System.exit(1)
  }
}