package cn.itcast.xc.analysis.buy

import cn.itcast.xc.common.EtlEnvironment
import cn.itcast.xc.entity.CourseBuyBiz
import org.apache.spark.sql.SparkSession

/**
 * Daily statistics job: aggregates course purchase volume per day from the
 * DWM-layer table `data_course.course_buy_dwm` and writes the result into
 * the BIZ-layer table `data_course.course_buy_biz`.
 *
 * Usage: CourseBuyProcessByBiz &lt;date_info&gt;
 * where &lt;date_info&gt; is the day to (re)compute, formatted to match
 * `concat_ws('-', years, months, days)` of the source partitions
 * (e.g. "2020-1-1").
 **/
object CourseBuyProcessByBiz {

  // Spark session for this job, named after the class for easy identification in the UI.
  val spark: SparkSession = EtlEnvironment.getSparkSession(this.getClass.getSimpleName)

  def main(args: Array[String]): Unit = {
    // Needed for the Dataset encoder used by .as[CourseBuyBiz] below.
    import spark.implicits._

    // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
    require(args.nonEmpty, "Usage: CourseBuyProcessByBiz <date_info> (e.g. 2020-1-1)")
    val dateInfo = args(0)
    // The argument is interpolated into the SQL text below; constrain it to a
    // date-shaped token so arbitrary SQL cannot be injected via the CLI.
    require(dateInfo.matches("""\d{4}-\d{1,2}-\d{1,2}"""),
      s"date_info must look like yyyy-M-d, got: '$dateInfo'")

    // Sum the sales volume per day, restricted to the requested date.
    val buyDS = spark.sql(
      s"""
        |SELECT
        |    concat_ws('-',years, months, days) as date_info,
        |    cast(sum(salesvolume) as int) count,
        |    years, months, days
        |FROM data_course.course_buy_dwm
        |WHERE  concat_ws('-',years, months, days) = '${dateInfo}'
        |GROUP BY years, months, days
        |""".stripMargin).as[CourseBuyBiz]

    // Enable dynamic partitioning, and overwrite ONLY the partitions present
    // in the dataset: without partitionOverwriteMode=dynamic, insertInto with
    // overwrite mode would truncate EVERY partition of the target table, not
    // just the day being recomputed.
    spark.conf.set("hive.exec.dynamic.partition", "true")
    spark.conf.set("hive.exec.dynamic.partition.mode", "nonstrict")
    spark.conf.set("spark.sql.sources.partitionOverwriteMode", "dynamic")

    buyDS
      // Collapse to a single output file per partition (one day = small data).
      .repartition(1)
      .write.mode("overwrite")
      // NOTE: insertInto matches columns by POSITION against the target
      // table's schema, so the SELECT order above must match the table.
      .insertInto("data_course.course_buy_biz")

    // Sanity check: show the resulting table contents.
    spark.sql("select * from data_course.course_buy_biz").show()

    // Release cluster resources.
    spark.close()
  }

}
