package cn.itcast.xc.analysis.buy

import cn.itcast.xc.common.EtlEnvironment
import cn.itcast.xc.entity.CourseBuyDWM
import org.apache.spark.sql.SparkSession

/**
 * Course purchase DWM layer (lightly aggregated fact cleanup).
 *
 * Aggregates course purchases per course per day (sales amount and sales
 * volume) from the DWD fact table, joins the course dimension for company
 * and category attributes, and writes the result into
 * `data_course.course_buy_dwm`.
 *
 * Expects exactly one program argument: the partition date, `yyyy-MM-dd`.
 **/
object CourseBuyProcessToDWM {
  // Shared Hive-enabled SparkSession for this job, named after the class.
  val spark: SparkSession = EtlEnvironment.getSparkSession(this.getClass.getSimpleName)

  def main(args: Array[String]): Unit = {
    // Needed for the .as[CourseBuyDWM] typed conversion below.
    import spark.implicits._

    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
    require(args.nonEmpty, "Usage: CourseBuyProcessToDWM <date: yyyy-MM-dd>")
    val dateInfo = args(0)
    // The date is spliced directly into the SQL text, so validate its shape
    // first — guards against SQL injection and malformed-date full scans.
    require(
      dateInfo.matches("""\d{4}-\d{2}-\d{2}"""),
      s"Invalid date '$dateInfo', expected format yyyy-MM-dd"
    )

    // Join dimensions and lightly aggregate: one row per course per day,
    // with total sales amount and purchase count for that day.
    val buyDS = spark.sql(
      s"""
         |SELECT
         |    a.course_dim_id,
         |    b.company_id,
         |    b.mt course_category_dim_id,
         |    a.sales,
         |    a.salesvolume,
         |    a.years, a.months, a.days
         |FROM
         |    (SELECT
         |        course_dim_id,
         |        sum(price) sales,
         |        count(course_dim_id) salesvolume,
         |        years, months, days
         |    from data_course.course_buy_fact WHERE concat_ws('-', years, months, days) = '${dateInfo}'
         |    GROUP BY course_dim_id, years, months, days) as a
         |LEFT JOIN data_dimen.course_dim b
         |on a.course_dim_id = b.course_dim_id
         |""".stripMargin).as[CourseBuyDWM]

    // Allow dynamic partition inserts on the Hive side.
    spark.conf.set("hive.exec.dynamic.partition.mode", "nonstrict")

    // NOTE(review): with the default static partitionOverwriteMode,
    // mode("overwrite") + insertInto overwrites ALL partitions of the target
    // table, not just this day's. If the table is day-partitioned and other
    // days must survive, set spark.sql.sources.partitioneOverwriteMode=dynamic
    // — confirm the table DDL and intended semantics before changing.
    buyDS
      // Collapse to a single output file per run.
      .repartition(1)
      .write.mode("overwrite")
      .insertInto("data_course.course_buy_dwm")

    // Sanity-check the written result.
    spark.sql("select * from data_course.course_buy_dwm").show()

    // Release the SparkSession.
    spark.close()
  }
}
