package cn.itcast.xc.analysis.subject.learn

import cn.itcast.xc.common.EtlEnvironment
import cn.itcast.xc.entity.{CourseDimen, LearningCourseFact, LearningCourseSource}
import cn.itcast.xc.utils.DateUtils.getDateStr
import org.apache.spark.sql.SparkSession

/**
 * ETL job: cleans raw user course-selection records
 * (`data_course.learning_course_source`) for one day and loads them into the
 * fact table `data_course.learning_course_fact`, joining in the course
 * category via the course dimension (`data_dimen.course_dim`).
 *
 * Usage: UserLearnSourceToFact &lt;date_info&gt; where date_info is `yyyy-MM-dd`.
 **/
object UserLearnSourceToFact {

  // Spark session shared by the job, named after this class for the Spark UI.
  val spark: SparkSession = EtlEnvironment.getSparkSession(this.getClass.getSimpleName)

  def main(args: Array[String]): Unit = {
    import spark.implicits._

    // Validate the CLI argument up front: a missing or malformed date would
    // otherwise surface as an opaque index error or a silently empty SQL filter
    // (the value is interpolated into the query below).
    require(args.nonEmpty, "usage: UserLearnSourceToFact <date_info: yyyy-MM-dd>")
    val dateInfo = args(0)
    require(dateInfo.matches("""\d{4}-\d{2}-\d{2}"""),
      s"date_info must be yyyy-MM-dd, got: '$dateInfo'")

    try {
      // Build course_id -> category (mt) lookup from the course dimension.
      // collect() is acceptable here: the dimension table is assumed small
      // enough to fit on the driver. Immutable Map replaces the previous
      // side-effecting `var courseMap += ...` loop.
      val courseMap: Map[String, String] =
        spark.sql("SELECT * FROM data_dimen.course_dim")
          .as[CourseDimen]
          .collect()
          .map(x => x.course_dim_id.toString -> x.mt)
          .toMap

      // Load the raw selection records for the requested day.
      // NOTE(review): collect() pulls the whole day's rows to the driver —
      // assumes daily volume is modest; confirm before scaling up.
      val learnSource =
        spark.sql(
          s"SELECT * from data_course.learning_course_source WHERE from_unixtime(choose_time, 'yyyy-MM-dd') ='${dateInfo}'")
          .as[LearningCourseSource]
          .collect()

      // Map each source row onto the fact schema.
      val learnFact = learnSource.map { obj =>
        // Category dimension: unknown courses default to "-1".
        // (Replaces the old `Option.get` wrapped in a catch-everything block.)
        val catId = courseMap.getOrElse(obj.course_id, "-1")
        // Time dimension: choose_time is epoch seconds; key format yyyy-MM-dd-HH.
        val timeId = getDateStr(obj.choose_time.toLong * 1000, "yyyy-MM-dd-HH")
        val ymd = timeId.split("-")
        LearningCourseFact(
          obj.learning_course_id, catId, obj.course_id, obj.user_id, timeId, obj.status,
          ymd(0), ymd(1), ymd(2))
      }

      // Persist to the warehouse.
      val learnDF = spark.createDataFrame(learnFact)
      // Allow dynamic partitioning so the day partition is derived from the data.
      spark.conf.set("hive.exec.dynamic.partition.mode", "nonstrict")
      // Single output file per run keeps the partition tidy.
      learnDF.repartition(1)
        .write.mode("overwrite")
        .insertInto("data_course.learning_course_fact")
      // Spot-check the result.
      spark.sql("select * from data_course.learning_course_fact").show()
    } finally {
      // Release the session even if the job fails.
      spark.close()
    }
  }
}
