package com.atbeijing.sparksqltuning

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}

object AqeTest {
  /**
   * Entry point: builds a Spark session with AQE (Adaptive Query Execution)
   * enabled, points it at the HDFS HA cluster, runs the join job, and stops
   * the session when done.
   */
  def main(args: Array[String]): Unit = {
    System.setProperty("HADOOP_USER_NAME", "root")
    val sparkConf = new SparkConf().setAppName("test")
      .set("spark.sql.autoBroadcastJoinThreshold", "-1") // disable broadcast joins so the test exercises shuffle joins
      .set("spark.sql.adaptive.enabled", "true") // enable AQE
      .set("spark.sql.adaptive.coalescePartitions.enabled", "true") // let AQE coalesce small shuffle partitions
      // A partition is considered skewed when its size exceeds
      // spark.sql.adaptive.skewJoin.skewedPartitionFactor * (median partition size)
      // AND exceeds spark.sql.adaptive.skewJoin.skewedPartitionThresholdInBytes.
      .set("spark.sql.adaptive.skewJoin.skewedPartitionFactor", "2")
      .set("spark.sql.adaptive.skewJoin.skewedPartitionThresholdInBytes", "10mb")
      // Advisory shuffle-partition size during adaptive optimization (takes effect
      // when spark.sql.adaptive.enabled is true and Spark coalesces small partitions
      // or splits skewed ones).
      .set("spark.sql.adaptive.advisoryPartitionSizeInBytes", "8mb")
    val sparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()
    val ssc = sparkSession.sparkContext
    ssc.hadoopConfiguration.set("fs.defaultFS", "hdfs://mycluster")
    ssc.hadoopConfiguration.set("dfs.nameservices", "mycluster")
    try {
      useJoin(sparkSession)
    } finally {
      // Always release cluster resources, even if the job throws.
      sparkSession.stop()
    }
  }

  /**
   * Right-joins sale courses onto the shopping cart, left-joins payments,
   * projects the detail columns, and overwrites dws.dws_salecourse_detail_1.
   *
   * `discount`/`createtime` columns are renamed per source table to avoid
   * ambiguous column references after the joins.
   */
  def useJoin(sparkSession: SparkSession): Unit = {
    val saleCourse = sparkSession.sql("select *from dwd.dwd_sale_course")
    val coursePay = sparkSession.sql("select * from dwd.dwd_course_pay")
      .withColumnRenamed("discount", "pay_discount")
      .withColumnRenamed("createtime", "pay_createtime")
    val courseShoppingCart = sparkSession.sql("select *from dwd.dwd_course_shopping_cart")
      .drop("coursename") // dropped so the joined result keeps saleCourse's coursename
      .withColumnRenamed("discount", "cart_discount")
      .withColumnRenamed("createtime", "cart_createtime")
    saleCourse.join(courseShoppingCart, Seq("courseid", "dt", "dn"), "right")
      .join(coursePay, Seq("orderid", "dt", "dn"), "left")
      .select("courseid", "coursename", "status", "pointlistid", "majorid", "chapterid", "chaptername", "edusubjectid"
        , "edusubjectname", "teacherid", "teachername", "coursemanager", "money", "orderid", "cart_discount", "sellmoney",
        "cart_createtime", "pay_discount", "paymoney", "pay_createtime", "dt", "dn")
      // NOTE: insertInto matches columns by position, so the select order above
      // must match the target table's column order.
      .write.mode(SaveMode.Overwrite).insertInto("dws.dws_salecourse_detail_1")
  }

  /**
   * Demo of AQE switching join strategies at runtime: filters coursePay down to
   * a small orderid range so AQE can convert the shuffle join to a broadcast join.
   *
   * NOTE(review): name contains a typo ("Startegies" -> "Strategies"); kept
   * as-is to avoid breaking external callers.
   */
  def switchJoinStartegies(sparkSession: SparkSession): Unit = {
    //    val saleCourse = sparkSession.sql("select *from dwd.dwd_sale_course")
    val coursePay = sparkSession.sql("select * from dwd.dwd_course_pay")
      .withColumnRenamed("discount", "pay_discount")
      .withColumnRenamed("createtime", "pay_createtime")
      .where("orderid between 'odid-9999000' and 'odid-9999999'")
    val courseShoppingCart = sparkSession.sql("select *from dwd.dwd_course_shopping_cart")
      .drop("coursename")
      .withColumnRenamed("discount", "cart_discount")
      .withColumnRenamed("createtime", "cart_createtime")
    val tmpdata = coursePay.join(courseShoppingCart, Seq("orderid"), "right")
    tmpdata.show()
  }
}
