package com.atguigu.sellcourse.service

import com.alibaba.fastjson.JSONObject
import com.atguigu.constan.Constan
import com.atguigu.util.JsonUtil
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * description ：读取售课模块表 etl 并写入 dwd 层， service
 * author      ：剧情再美终是戏 
 * mail        : 13286520398@163.com
 * date        ：Created in 2020/3/10 14:35
 * modified By ：
 * version:    : 1.0
 */
object DwdSellCourseService {

  /**
   * Parses raw log lines into JSON objects, silently dropping lines that do
   * not parse to a [[JSONObject]] (JsonUtil.parseObject may yield null or a
   * non-object value for malformed input — the original code filtered on
   * `isInstanceOf[JSONObject]`, which implies exactly that contract).
   *
   * Parsing happens exactly ONCE per line; the previous implementation parsed
   * every line twice (once in `filter`, once in `map`).
   *
   * @param lines iterator over raw log lines of one partition
   * @return iterator over successfully parsed JSON objects
   */
  private def parseJsonLines(lines: Iterator[String]): Iterator[JSONObject] =
    lines.flatMap { line =>
      JsonUtil.parseObject(line) match {
        case obj: JSONObject => Some(obj)
        case _               => None // unparsable / null — skip the line
      }
    }

  /**
   * ETLs the course-shopping-cart log into the bucketed dwd table.
   *
   * NOTE(review): the original code chained `.partitionBy("dt","dn")`,
   * `.bucketBy(10,"orderid")` and `.sortBy("orderid")` in front of
   * `insertInto`, which Spark rejects at runtime (`AnalysisException`:
   * insertInto() can't be used together with partitionBy()/bucketBy()).
   * `insertInto` writes positionally into an EXISTING table, whose DDL must
   * already declare the dt/dn partitions and the orderid bucketing — so those
   * calls are removed here.
   *
   * @param spark active SparkSession
   * @param dt    date partition   (currently unused — partition values come
   *              from the data itself; kept for interface compatibility)
   * @param dn    region partition (currently unused, see above)
   */
  def importCourseShoppingCartBuket(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._

    val source = spark.sparkContext
      .textFile(Constan.ODS_FILE_PATH + "/courseshoppingcart.log")

    source
      .mapPartitions(parseJsonLines)
      .map(json => (
        json.getString("courseid"),
        json.getString("orderid"),
        json.getString("coursename"),
        json.getString("discount"),
        json.getString("sellmoney"),
        json.getString("createtime"),
        json.getString("dt"),
        json.getString("dn")
      ))
      // name the columns explicitly instead of the default _1.._8
      .toDF("courseid", "orderid", "coursename", "discount",
        "sellmoney", "createtime", "dt", "dn")
      .coalesce(6)
      .write
      .mode(SaveMode.Overwrite)
      .insertInto("dwd.dwd_course_shopping_cart_buket")
  }

  /**
   * ETLs the course-pay log into the bucketed dwd table.
   *
   * NOTE(review): `partitionBy`/`bucketBy`/`sortBy` removed in front of
   * `insertInto` for the same reason as in [[importCourseShoppingCartBuket]]
   * — Spark throws AnalysisException on that combination; the table DDL owns
   * the partition/bucket spec.
   *
   * @param spark active SparkSession
   * @param dt    date partition   (currently unused; kept for interface compatibility)
   * @param dn    region partition (currently unused; kept for interface compatibility)
   */
  def importCoursePayBuket(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._

    val source = spark.sparkContext
      .textFile(Constan.ODS_FILE_PATH + "/coursepay.log")

    source
      .mapPartitions(parseJsonLines)
      .map(json => (
        json.getString("orderid"),
        json.getString("discount"),
        json.getString("paymoney"),
        // BUGFIX: original read the misspelled key "createitme", which would
        // always return null — TODO confirm the log schema really says "createtime"
        json.getString("createtime"),
        json.getString("dt"),
        json.getString("dn")
      ))
      .toDF("orderid", "discount", "paymoney", "createtime", "dt", "dn")
      .coalesce(1)
      .write
      .mode(SaveMode.Overwrite)
      .insertInto("dwd.dwd_course_pay_buket")
  }

  /**
   * ETLs the course-shopping-cart log into the plain (non-bucketed) dwd table.
   *
   * @param spark active SparkSession
   * @param dt    date partition   (currently unused; kept for interface compatibility)
   * @param dn    region partition (currently unused; kept for interface compatibility)
   */
  def importCourseShoppingCart(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._

    val source = spark.sparkContext
      .textFile(Constan.ODS_FILE_PATH + "/courseshoppingcart.log")

    source
      .mapPartitions(parseJsonLines)
      .map(json => (
        json.getString("courseid"),
        json.getString("orderid"),
        json.getString("coursename"),
        json.getString("discount"),
        json.getString("sellmoney"),
        json.getString("createtime"),
        json.getString("dt"),
        json.getString("dn")
      ))
      .toDF("courseid", "orderid", "coursename", "discount",
        "sellmoney", "createtime", "dt", "dn")
      .coalesce(6)
      .write
      .mode(SaveMode.Overwrite)
      .insertInto("dwd.dwd_course_shopping_cart")
  }

  /**
   * ETLs the course-pay log into the plain (non-bucketed) dwd table.
   *
   * @param spark active SparkSession
   * @param dt    date partition   (currently unused; kept for interface compatibility)
   * @param dn    region partition (currently unused; kept for interface compatibility)
   */
  def importCoursePay(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._

    val source = spark.sparkContext
      .textFile(Constan.ODS_FILE_PATH + "/coursepay.log")

    source
      .mapPartitions(parseJsonLines)
      .map(json => (
        json.getString("orderid"),
        json.getString("discount"),
        json.getString("paymoney"),
        // BUGFIX: original read the misspelled key "createitme", which would
        // always return null — TODO confirm the log schema really says "createtime"
        json.getString("createtime"),
        json.getString("dt"),
        json.getString("dn")
      ))
      .toDF("orderid", "discount", "paymoney", "createtime", "dt", "dn")
      .coalesce(1)
      .write
      .mode(SaveMode.Overwrite)
      .insertInto("dwd.dwd_course_pay")
  }

  /**
   * ETLs the sale-course log into the dwd table.
   *
   * @param spark active SparkSession
   * @param dt    date partition   (currently unused; kept for interface compatibility)
   * @param dn    region partition (currently unused; kept for interface compatibility)
   */
  def importSaleCourseLog(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._ // implicit RDD -> DataFrame conversion

    val source = spark.sparkContext
      .textFile(Constan.ODS_FILE_PATH + "/salecourse.log")

    source
      .mapPartitions(parseJsonLines)
      .map(json => (
        json.getString("courseid"),
        json.getString("coursename"),
        json.getString("status"),
        json.getString("pointlistid"),
        json.getString("majorid"),
        json.getString("chapterid"),
        json.getString("chaptername"),
        json.getString("edusubjectid"),
        json.getString("edusubjectname"),
        json.getString("teacherid"),
        json.getString("teachername"),
        json.getString("coursemanager"),
        json.getString("money"),
        json.getString("dt"),
        json.getString("dn")
      ))
      .toDF("courseid", "coursename", "status", "pointlistid", "majorid",
        "chapterid", "chaptername", "edusubjectid", "edusubjectname",
        "teacherid", "teachername", "coursemanager", "money", "dt", "dn")
      .coalesce(1)
      .write
      .mode(SaveMode.Overwrite)
      .insertInto("dwd.dwd_sale_course")
  }

}
