package cn.itcast.xc.analysis.search

import cn.itcast.xc.analysis.common.EtlEnvironment
import cn.itcast.xc.entity.UserLeanOnline
import cn.itcast.xc.utils.DateUtils.minConvertDayHourMin
import cn.itcast.xc.utils.StrUtils.getUuId
import org.apache.spark.sql.SparkSession

/**
 * Instant-query ETL, step 1.
 *
 * Loads the raw per-event "course learning online" records for one day from HDFS,
 * aggregates them to one row per (user, course, video, session), joins the course /
 * video / user dimension tables, and writes the result into the DWM-layer Hive table
 * `data_course.learning_course_online_dwm` (dynamic partitioning on `date_info`).
 */
object InstantQueryStep1ToHive {

  /**
   * Shared SparkSession, obtained from the project ETL environment.
   * NOTE(review): initialized eagerly at object load — assumed intentional per project convention.
   */
  val spark: SparkSession = EtlEnvironment.getSparkSession(this.getClass.getSimpleName, null)

  /**
   * Entry point.
   *
   * @param args args(0) is the partition date of the source data, e.g. "2019-12-11".
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear usage message instead of an opaque
    // ArrayIndexOutOfBoundsException when the date argument is missing.
    require(args.nonEmpty, "Usage: InstantQueryStep1ToHive <date_info>, e.g. 2019-12-11")
    val dateInfo = args(0)

    val sc = spark.sparkContext

    // 1. Read the day's raw records from HDFS. Each line is tab-separated;
    //    the 7th field is the learn timestamp (epoch seconds) and must be numeric.
    val learnSource = sc.textFile(s"/user/hive/external/data_course/learning_course_online_source/${dateInfo}")
    val learnSourceRDD = learnSource.map(_.split("\t")).map(arr => {
      UserLeanOnline(arr(0), arr(1), arr(2), arr(3), arr(4), arr(5), arr(6).toLong)
    })
    // Expose the raw records to Spark SQL as a temporary view.
    spark.createDataFrame(learnSourceRDD).createOrReplaceTempView("learn_source_tmp")

    // 2. Join dimension tables and 3. compute learning duration.
    // Register UDFs: surrogate-key generator and a formatter that renders an
    // event count as a human-readable "days/hours/minutes" duration string.
    spark.udf.register("getUuid", () => {
      getUuId()
    })
    spark.udf.register("minConvertDayHourMin", (time: Long) => {
      minConvertDayHourMin(time)
    })
    // Inner query: collapse raw events to one row per user/course/video/session,
    // keeping the earliest timestamp and the event count (proxy for minutes learned).
    // Outer query: decorate with dimension attributes and formatted fields;
    // the trailing date_info column feeds Hive dynamic partitioning.
    val userLearnDF = spark.sql(
      """
        |select
        | getUuid() learning_course_online_id,
        | l.course_id,
        | c.name course_name,
        | cv.video_name video_name,
        | l.user_id,
        | u.name user_name,
        | from_unixtime(l.learn_time, 'yyyy-MM-dd HH:mm:ss') learn_time,
        | minConvertDayHourMin(learn_count_tmp) learn_count,
        | from_unixtime(l.learn_time, 'yyyy-MM-dd') date_info
        |from
        |( select
        | user_id,
        | course_id,
        | course_video_id,
        | min(learn_time) learn_time,
        | count(1) learn_count_tmp
        |from learn_source_tmp
        |group by user_id, course_id, course_video_id, user_session_id ) l
        |left join data_dimen.course_dim c on c.course_dim_id = l.course_id
        |left join data_dimen.course_video_dim cv on cv.course_video_dim_id = l.course_video_id
        |left join data_dimen.user_dim u on u.user_dim_id = l.user_id
        |""".stripMargin)

    // 4. Persist to the warehouse.
    // Allow dynamic partition values without a static partition spec.
    spark.conf.set("hive.exec.dynamic.partition.mode", "nonstrict")
    // Target DWM-layer table.
    val tableName = "data_course.learning_course_online_dwm"
    userLearnDF
      // Collapse to a single output file per partition.
      .repartition(1)
      // Overwrite mode with insertInto issues INSERT OVERWRITE on matched partitions.
      .write.mode("overwrite")
      .insertInto(s"${tableName}")

    // Sanity check: show the written rows.
    spark.sql(s"select * from ${tableName}").show()

    // Release Spark resources.
    spark.close()
  }

}
