package com.sunzm.spark.sql.analyze

import com.alibaba.fastjson.JSON
import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

import scala.util.control.NonFatal

/**
 * Demonstrates the SparkSQL execution flow: parses a JSON message log into a
 * DataFrame, runs a query against it, and prints the parsed, analyzed,
 * optimized and physical plans via `explain(true)`.
 *
 * @author Administrator
 * @version 1.0
 * @date 2021-06-29 20:50
 */
object SparkSQALanalyzeJob {

  /**
   * Parses one raw JSON log line and keeps it only when it is a valid
   * `action == 5` record with a non-blank companyId and sessionId.
   *
   * @param line raw JSON string from the message log
   * @return Some((companyId, sessionId, datetime, action)) for records that
   *         pass the filter; None for malformed or filtered-out lines
   */
  private def parseMsgLog(line: String): Option[(String, String, Long, Int)] =
    try {
      val json = JSON.parseObject(line)
      val companyId = json.getString("companyId")
      val sessionId = json.getString("sessionId")
      val datetime = json.getLongValue("datetime")
      val action = json.getIntValue("action")

      if (action == 5 && StringUtils.isNotBlank(companyId) && StringUtils.isNotBlank(sessionId))
        Some((companyId, sessionId, datetime, action))
      else
        None
    } catch {
      // NonFatal instead of Throwable: fatal errors (OutOfMemoryError,
      // InterruptedException, ...) still propagate; a malformed line is
      // logged and dropped.
      case NonFatal(e) =>
        e.printStackTrace()
        None
    }

  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getSimpleName.stripSuffix("$"))
      .master("local[*]")
      .config("spark.default.parallelism", 6)
      .config("spark.sql.shuffle.partitions", 6)
      .getOrCreate()

    // Provides the Encoder for the tuple produced by flatMap/toDF below.
    import spark.implicits._

    // Manual parsing of the raw message log.
    val dataDS: Dataset[String] = spark.read.textFile("data/spark/sql/msgLog.dat")

    // Parse each line exactly once. The original filter + map pipeline parsed
    // every surviving line twice; flatMap over Option fuses filtering and
    // projection into a single pass.
    dataDS
      .flatMap(line => parseMsgLog(line))
      .toDF("companyId", "sessionId", "datetime", "action")
      .createOrReplaceTempView("v_msgLog")

    /* Example query whose constant expression (1 + 2 + 3) is folded away in
       the Optimized Logical Plan:
    val resultDF: DataFrame = spark.sql(
      """
        | SELECT companyId, sessionId, datetime,action
        |   FROM v_msgLog
        |  WHERE action = 1 + 2 + 3
        |""".stripMargin)
    */

    val resultDF: DataFrame = spark.sql(
      """
        | SELECT sessionId
        |   FROM v_msgLog
        | GROUP BY sessionId
        |""".stripMargin)

    // explain(true) prints all four stages: Parsed Logical Plan, Analyzed
    // Logical Plan, Optimized Logical Plan and Physical Plan.
    resultDF.explain(true)

    // resultDF.queryExecution   // programmatic access to each plan stage
    // resultDF.explain()        // physical plan only
    // resultDF.show(false)

    spark.stop()
  }
}
