package com.atguigu.sellcourse.service

import com.atguigu.sellcourse.bean.{DwdCourseShoppingCart, DwdSaleCourse}
import com.atguigu.sellcourse.dao.DwdSellCourseDao
import org.apache.spark.sql.{SaveMode, SparkSession}

import scala.collection.mutable.ArrayBuffer
import scala.util.Random

/**
 * description ：读取 dwd 层数据， 合并成为售课详情大宽表
 * author      ：剧情再美终是戏 
 * mail        : 13286520398@163.com
 * date        ：Created in 2020/3/10 14:53
 * modified By ：
 * version:    : 1.0
 */
object DwsSellCourseService {
  /**
   * Builds the dws course-sale detail wide table with a plain (shuffle) join
   * of the three dwd tables, then overwrites `dws.dws_salecourse_detail`.
   *
   * @param spark active SparkSession
   * @param dt    date partition
   * @param dn    region partition
   */
  def importSellCourseDetail(spark: SparkSession, dt: String, dn: String): Unit = {
    // Sale-course dimension table.
    val saleCourseDf = DwdSellCourseDao.getSaleCourse(spark, dt, dn)
    // Shopping-cart fact table. Drop its coursename so the joined result keeps
    // the one from saleCourseDf; prefix its renamed columns with "cart_" so they
    // neither collide with the pay table's "pay_discount" nor miss the final
    // select (the original code renamed to "pay_discount"/"cart_cretetime",
    // which made select("cart_discount", ..., "cart_createtime") fail).
    // NOTE: the dwd source column really is spelled "cretetime".
    val courseShoppingCartDf = DwdSellCourseDao.CourseShoppingCart(spark, dt, dn)
      .drop("coursename")
      .withColumnRenamed("discount", "cart_discount")
      .withColumnRenamed("cretetime", "cart_createtime")
    // Course-pay fact table.
    val coursePayDf = DwdSellCourseDao.getCoursePay(spark, dt, dn)
      .withColumnRenamed("discount", "pay_discount")
      .withColumnRenamed("cretetime", "pay_createtime")

    // Right join keeps every cart row even without a matching course;
    // left join keeps cart rows that have not been paid yet.
    saleCourseDf
      .join(courseShoppingCartDf, Seq("courseid", "dt", "dn"), "right")
      .join(coursePayDf, Seq("orderid", "dt", "dn"), "left")
      .select("courseid", "coursename", "status", "pointlistid", "majorid", "chapterid", "chaptername", "edusubjectid"
        , "edusubjectname", "teacherid", "teachername", "coursemanager", "money", "orderid", "cart_discount", "sellmoney",
        "cart_createtime", "pay_discount", "paymoney", "pay_createtime", "dt", "dn")
      .write.mode(SaveMode.Overwrite).insertInto("dws.dws_salecourse_detail")
  }

  /**
   * Skew-mitigating variant: salts the big table (shopping cart) with a random
   * 0-99 suffix on courseid and expands the small table (sale course) 100x so
   * the salted keys still match, then joins on the salted key.
   *
   * @param spark active SparkSession
   * @param dt    date partition
   * @param dn    region partition
   */
  def importSellCourseDetail2(spark: SparkSession, dt: String, dn: String): Unit = {
    val saleCourseDf = DwdSellCourseDao.getSaleCourse(spark, dt, dn)
    // Keep coursename for now: the bean constructor below reads it. The
    // original code dropped it here and then crashed on getAs("coursename").
    val courseShoppingCartDf = DwdSellCourseDao.CourseShoppingCart(spark, dt, dn)
      .withColumnRenamed("discount", "cart_discount")
      .withColumnRenamed("cretetime", "cart_createtime")
    val coursePayDf = DwdSellCourseDao.getCoursePay(spark, dt, dn)
      .withColumnRenamed("discount", "pay_discount")
      .withColumnRenamed("cretetime", "pay_createtime")

    import spark.implicits._

    // Salt the big table: append a random 0-99 suffix to courseid.
    // NOTE(review): assumes DwdCourseShoppingCart's timestamp/discount fields
    // are named cart_createtime/cart_discount (required by the final select)
    // and its last field is rand_courseid — confirm against the bean.
    val saltedCartDf = courseShoppingCartDf
      .mapPartitions { rows =>
        rows.map { row =>
          val courseid = row.getAs[Int]("courseid")
          DwdCourseShoppingCart(
            courseid,
            row.getAs[String]("orderid"),
            row.getAs[String]("coursename"),
            row.getAs[java.math.BigDecimal]("cart_discount"),
            row.getAs[java.math.BigDecimal]("sellmoney"),
            // was getAs("createtime"): that column never existed under any rename
            row.getAs[java.sql.Timestamp]("cart_createtime"),
            row.getAs[String]("dt"),
            row.getAs[String]("dn"),
            s"${courseid}_${Random.nextInt(100)}")
        }
      }
      // Drop coursename only now, so the final select unambiguously takes the
      // one carried by the (expanded) sale-course side.
      .drop("coursename")

    // Expand the small table 100x, one copy per possible salt value.
    val expandedSaleCourseDf = saleCourseDf.flatMap { row =>
      val courseid = row.getAs[Int]("courseid")
      val coursename = row.getAs[String]("coursename")
      val status = row.getAs[String]("status")
      val pointlistid = row.getAs[Int]("pointlistid")
      val majorid = row.getAs[Int]("majorid")
      val chapterid = row.getAs[Int]("chapterid")
      val chaptername = row.getAs[String]("chaptername")
      val edusubjectid = row.getAs[Int]("edusubjectid")
      val edusubjectname = row.getAs[String]("edusubjectname")
      val teacherid = row.getAs[Int]("teacherid")
      val teachername = row.getAs[String]("teachername")
      val coursemanager = row.getAs[String]("coursemanager")
      val money = row.getAs[java.math.BigDecimal]("money")
      val dt = row.getAs[String]("dt")
      val dn = row.getAs[String]("dn")
      (0 until 100).map { salt =>
        DwdSaleCourse(courseid, coursename, status, pointlistid, majorid, chapterid, chaptername, edusubjectid,
          edusubjectname, teacherid, teachername, coursemanager, money, dt, dn, s"${courseid}_${salt}")
      }
    }

    // courseid is part of the join key as well: both sides still carry it, and
    // equal rand_courseid implies equal courseid. Leaving it out would make
    // select("courseid") ambiguous after the join.
    expandedSaleCourseDf
      .join(saltedCartDf, Seq("rand_courseid", "courseid", "dt", "dn"), "right")
      .join(coursePayDf, Seq("orderid", "dt", "dn"), "left")
      .select("courseid", "coursename", "status", "pointlistid", "majorid", "chapterid", "chaptername", "edusubjectid"
        , "edusubjectname", "teacherid", "teachername", "coursemanager", "money", "orderid", "cart_discount", "sellmoney",
        "cart_createtime", "pay_discount", "paymoney", "pay_createtime", "dt", "dn")
      .write.mode(SaveMode.Overwrite).insertInto("dws.dws_salecourse_detail")
  }

  /**
   * Broadcast-join variant: broadcasts the small sale-course table to every
   * executor so the join with the large cart/pay tables avoids a shuffle.
   *
   * @param spark active SparkSession
   * @param dt    date partition
   * @param dn    region partition
   */
  def importSellCourseDetail3(spark: SparkSession, dt: String, dn: String): Unit = {
    val saleCourseDf = DwdSellCourseDao.getSaleCourse(spark, dt, dn)
    // Same rename fixes as importSellCourseDetail: the cart's discount must
    // become "cart_discount" (not "pay_discount") and its create time
    // "cart_createtime" (not the typo "cart_cretetime") for the select below.
    val courseShoppingCartDf = DwdSellCourseDao.CourseShoppingCart(spark, dt, dn)
      .drop("coursename")
      .withColumnRenamed("discount", "cart_discount")
      .withColumnRenamed("cretetime", "cart_createtime")
    val coursePayDf = DwdSellCourseDao.getCoursePay(spark, dt, dn)
      .withColumnRenamed("discount", "pay_discount")
      .withColumnRenamed("cretetime", "pay_createtime")

    import org.apache.spark.sql.functions._
    // Broadcast the small dimension table to every executor.
    broadcast(saleCourseDf)
      .join(courseShoppingCartDf, Seq("courseid", "dt", "dn"), "right")
      .join(coursePayDf, Seq("orderid", "dt", "dn"), "left")
      .select("courseid", "coursename", "status", "pointlistid", "majorid", "chapterid", "chaptername", "edusubjectid"
        , "edusubjectname", "teacherid", "teachername", "coursemanager", "money", "orderid", "cart_discount", "sellmoney",
        "cart_createtime", "pay_discount", "paymoney", "pay_createtime", "dt", "dn")
      .write.mode(SaveMode.Overwrite).insertInto("dws.dws_salecourse_detail")
  }

  /**
   * Sort-merge-bucket variant: the bucketed cart and pay tables are joined
   * first (SMB join on the bucket key orderid), then the small sale-course
   * table is broadcast onto the result.
   *
   * @param spark active SparkSession
   * @param dt    date partition
   * @param dn    region partition
   */
  def importSellCourseDetail4(spark: SparkSession, dt: String, dn: String): Unit = {
    val saleCourseDf = DwdSellCourseDao.getSaleCourse(spark, dt, dn)
    // Bucketed shopping-cart table, with the same rename fixes as above.
    val courseShoppingCartDf = DwdSellCourseDao.CourseShoppingCartBuket(spark, dt, dn)
      .drop("coursename")
      .withColumnRenamed("discount", "cart_discount")
      .withColumnRenamed("cretetime", "cart_createtime")
    // Bucketed pay table.
    val coursePayDf = DwdSellCourseDao.getCoursePayBuket(spark, dt, dn)
      .withColumnRenamed("discount", "pay_discount")
      .withColumnRenamed("cretetime", "pay_createtime")

    import org.apache.spark.sql.functions._
    // Join the two bucketed tables first. dt/dn must be part of the key here:
    // joining on orderid alone leaves duplicate dt/dn columns, which would make
    // the next join's Seq("courseid", "dt", "dn") ambiguous.
    val cartWithPayDf = courseShoppingCartDf.join(coursePayDf, Seq("orderid", "dt", "dn"), "left")
    // Then attach the broadcast small table.
    broadcast(saleCourseDf).join(cartWithPayDf, Seq("courseid", "dt", "dn"), "right")
      .select("courseid", "coursename", "status", "pointlistid", "majorid", "chapterid", "chaptername", "edusubjectid"
        , "edusubjectname", "teacherid", "teachername", "coursemanager", "money", "orderid", "cart_discount", "sellmoney",
        "cart_createtime", "pay_discount", "paymoney", "pay_createtime", "dt", "dn")
      .write.mode(SaveMode.Overwrite).insertInto("dws.dws_salecourse_detail")
  }
}
