package cn.itcast.xc.analysis.buy

import cn.itcast.xc.common.EtlEnvironment
import cn.itcast.xc.entity._
import cn.itcast.xc.utils.DateUtils.getDateStr
import cn.itcast.xc.utils.IpUtils
import com.alibaba.fastjson.JSON
import org.apache.spark.sql.SparkSession

/**
 * <p>
 * Course purchase fact ETL (DWD layer): joins raw course orders for one day
 * against the user / area / course dimensions and overwrites the Hive fact
 * table `data_course.course_buy_fact`.
 * </p>
 **/
object CourseBuyHiveToFact {
  // Shared SparkSession for this job, built by the project ETL environment.
  val spark: SparkSession = EtlEnvironment.getSparkSession(this.getClass.getSimpleName)

  def main(args: Array[String]): Unit = {
    import spark.implicits._
    import scala.util.control.NonFatal

    // --- Dimension lookups, collected to the driver as immutable maps ---

    // User dimension: user_dim_id -> ip
    val userMap: Map[String, String] =
      spark.sql("select * from data_dimen.user_dim").as[UserDimen].collect()
        .map(x => x.user_dim_id -> x.ip).toMap

    // Area dimension: city_name -> area_dim_id
    val areaMap: Map[String, String] =
      spark.sql("SELECT * FROM data_dimen.area_dim").as[AreaDimen].collect()
        .map(x => x.city_name -> x.area_dim_id.toString).toMap

    // Course dimension: course_dim_id -> company_id (rows with missing ids are skipped)
    val courseMap: Map[String, String] =
      spark.sql("SELECT * FROM data_dimen.course_dim").as[CourseDimen].collect()
        .filter(x => x.course_dim_id != null && x.company_id != null)
        .map(x => x.course_dim_id.toString -> x.company_id.toString).toMap

    // Target date partition (e.g. "2019-11-13"), passed as the first CLI argument.
    require(args.nonEmpty, "usage: CourseBuyHiveToFact <yyyy-MM-dd>")
    val date_info = args(0)

    val courseBuySource = spark.sql(
      s"select * from data_course.course_orders_source " +
        s"where from_unixtime(order_time, 'yyyy-MM-dd') = '${date_info}'")
      .as[CourseOrdersSource].collect()

    val cleanCourseBuy = courseBuySource.map { obj =>
      // Time dimension key, yyyy-MM-dd-HH (order_time is epoch seconds).
      val time_dim_id = getDateStr(obj.order_time.toLong * 1000, "yyyy-MM-dd-HH")

      // Area dimension: user_id -> ip -> city -> area id; "-1" when any step fails.
      // BUG FIX: the original computed this lookup but never assigned it to areaId,
      // so every row was written with area "-1".
      val areaId =
        try {
          userMap.get(obj.user_id)
            .map(IpUtils.getArea)
            .flatMap(areaMap.get)
            .getOrElse("-1")
        } catch {
          // NonFatal only: an IP-parse failure degrades to "unknown area",
          // while fatal errors (OOM etc.) still propagate.
          case NonFatal(_) => "-1"
        }

      // Company dimension: the first order item determines the course/company.
      val json = JSON.parseArray(obj.details).getJSONObject(0)
      // BUG FIX: .get on a missing course id threw NoSuchElementException and
      // killed the job; use the "-1" sentinel, consistent with areaId.
      val comId = courseMap.getOrElse(json.getString("itemId"), "-1")

      // Partition columns: year / month / day split out of the time key.
      val ymd = time_dim_id.split("-")

      // Assemble the fact-table row.
      CourseBuyFact(obj.order_number,
        json.getString("itemId"),
        time_dim_id, areaId,
        comId, obj.price,
        obj.status, obj.user_id,
        ymd(0), ymd(1), ymd(2)
      )
    }

    // Convert to a DataFrame so it can be written to Hive.
    val buyDF = spark.createDataFrame(cleanCourseBuy)

    // Allow dynamic-partition insert without a static partition spec.
    spark.conf.set("hive.exec.dynamic.partition.mode", "nonstrict")

    buyDF
      // one output file per partition
      .repartition(1)
      // replace any previous load for the same partitions
      .write.mode("overwrite")
      .insertInto("data_course.course_buy_fact")

    // Sanity check: show what landed in the fact table.
    spark.sql("select * from data_course.course_buy_fact").show()

    // Release the Spark resources.
    spark.close()
  }

}
