package com.doit.sparksql.day03

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

/**
 * @DATE 2022/1/16/15:22
 * @Author MDK
 * @Version 2021.2.2
 *
 *    Demo of Spark SQL logical execution plans:
 *      1) parsing checks the SQL's syntactic correctness
 *      2) analysis binds metadata (table/column info) from the catalog
 *      3) an UnresolvedRelation is a relation whose metadata is not yet bound;
 *         after resolution/optimization the logical plan becomes a physical plan
 * */
object Demo_Parse03 {
  // Silence Spark's verbose INFO logging so the printed plans are readable.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Walks through the Catalyst pipeline for a join query:
   * first parses the SQL into an *unresolved* logical plan (no catalog lookup),
   * then runs the full pipeline via `spark.sql` and prints the extended
   * (logical + physical) and whole-stage-codegen explain output.
   *
   * Fix over the original: the SparkSession is now stopped in a `finally`
   * block so the local Spark context and its resources are released even
   * when the demo fails partway through.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local")
      .appName("demo_parse")
      .getOrCreate()

    try {
      import spark.implicits._
      // Build two small Datasets and register them as temp views in the catalog.
      spark.createDataset(Seq((1, "zss", 23), (2, "lss", 35), (3, "wll", 28)))
        .toDF("id", "name", "age")
        .createTempView("user1")
      spark.createDataset(Seq((1, "zss", "F"), (2, "lss", "F"), (3, "wll", "M")))
        .toDF("id", "name", "gender")
        .createTempView("user2")

      val sql =
        """
          |select
          |o1.id, o1.name, o1.age,
          |o2.gender
          |from
          |(select id, name, age from user1 where name is not null)o1
          |join
          |(select id, name, gender from user2 where name is not null)o2
          |on o1.id = o2.id
          |where gender = 'F'
          |""".stripMargin

      // Parse only: yields an unresolved logical plan — relations/columns are
      // not yet bound to catalog metadata at this stage.
      val plan: LogicalPlan = spark.sessionState.sqlParser.parsePlan(sql)
      println(plan)

      println("----------------------------------------------------")
      // Full pipeline: analysis loads metadata from the catalog, then the
      // optimized logical plan is turned into a physical plan on execution.
      val df: DataFrame = spark.sql(sql)
      df.show(100, false)
      // "extended" prints parsed/analyzed/optimized logical plans and the physical plan.
      df.explain("extended")
      println("-----------------------------")
      // "codegen" prints the whole-stage generated code for the physical plan.
      df.explain("codegen")
    } finally {
      // Release the local Spark context even if anything above threw.
      spark.stop()
    }
  }
}
