package com.atguigu.sparktuning.partition_01

import com.atguigu.sparktuning.utils.InitUtil
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
  * @Author  Zhongxu.Zhao
  * @ClassName PartitionDemo
  * @Date 2021/12/16 23:49
  * @Desc: Joins sale_course, course_pay and course_shopping_cart (with broadcast
  *        joins disabled) and writes the wide result to sparktuning.salecourse_detail.
 **/
object PartitionDemo {

  /**
    * Entry point: builds a wide sale-course detail table.
    *
    * Reads three Hive tables from the `sparktuning` database, joins them on
    * their shared keys, and overwrites `sparktuning.salecourse_detail` with
    * the selected columns.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // Local mode; auto-broadcast join is disabled (-1) so the joins below
    // go through shuffle-based join strategies — this demo is about
    // partition behaviour, not broadcast optimization.
    val sparkConf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      .setMaster("local")
      .set("spark.sql.autoBroadcastJoinThreshold", "-1")
    val spark: SparkSession = InitUtil.initSparkSession(sparkConf)

    try {
      // Read the three source tables. Both course_pay and course_shopping_cart
      // carry "discount"/"createtime" columns, so rename them up front to keep
      // the columns distinguishable after the joins.
      val saleCourse: DataFrame = spark.sql("select * from sparktuning.sale_course")
      val coursePay: DataFrame = spark.sql("select * from sparktuning.course_pay")
        .withColumnRenamed("discount", "pay_discount")
        .withColumnRenamed("createtime", "pay_createtime")
      val courseShoppingCart: DataFrame = spark.sql("select * from sparktuning.course_shopping_cart")
        .withColumnRenamed("discount", "cart_discount")
        .withColumnRenamed("createtime", "cart_createtime")

      // Right join keeps every shopping-cart row even without a matching
      // course; left join keeps cart rows that have no payment record.
      saleCourse
        .join(courseShoppingCart, Seq("courseid", "dt", "dn"), "right")
        .join(coursePay, Seq("orderid", "dt", "dn"), "left")
        .select("courseid", "coursename", "status", "pointlistid", "majorid", "chapterid", "chaptername", "edusubjectid"
          , "edusubjectname", "teacherid", "teachername", "coursemanager", "money", "orderid", "cart_discount", "sellmoney",
          "cart_createtime", "pay_discount", "paymoney", "pay_createtime", "dt", "dn")
        .write.mode(SaveMode.Overwrite).saveAsTable("sparktuning.salecourse_detail")
    } finally {
      // Release the SparkContext and associated resources even if a stage fails.
      spark.stop()
    }
  }
}
