package com.guchenbo.spark.sql

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, With}
import org.apache.spark.sql.execution.SparkSqlParser
import org.apache.spark.sql.internal.SQLConf

import scala.collection.mutable.ListBuffer

/**
 * Utility for extracting the table names referenced by a SQL statement,
 * using Spark's Catalyst SQL parser (parse only — no analysis/resolution,
 * so no live catalog is required).
 *
 * NOTE: this object deliberately shares its simple name with
 * [[org.apache.spark.sql.execution.SparkSqlParser]]; the import above is what
 * resolves the type in `new SparkSqlParser(conf)`.
 *
 * @author guchenbo
 */
object SparkSqlParser {

  /**
   * Extracts every table name referenced in the given SQL text.
   *
   * The statement is parsed into an unresolved logical plan and walked
   * top-down, recording each [[UnresolvedRelation]] node. `WITH` (CTE)
   * sub-plans are traversed explicitly because they are stored in
   * `innerChildren`, which `transformDown` does not visit on its own.
   *
   * @param sqlText the SQL statement to inspect
   * @return the set of referenced table names, rendered as "db.table" when
   *         the identifier carries a database, otherwise just "table"
   */
  def getFullTable(sqlText: String): Set[String] = {
    val conf = SQLConf.get
    val rule = new Rule
    val sqlParser = new SparkSqlParser(conf)
    val logicalPlan = sqlParser.parsePlan(sqlText)
    logicalPlan.transformDown(rule)
    // The rule may record the same table several times (e.g. self-joins);
    // de-duplicate here.
    rule.list.toSet
  }

  /** Renders a [[TableIdentifier]] as "db.table", or "table" when no database is set. */
  private def tableIdentifier(tableIdentifier: TableIdentifier): String =
    tableIdentifier.database match {
      case Some(db) => s"$db.${tableIdentifier.table}"
      case None     => tableIdentifier.table
    }

  /**
   * Per-node visitor applied during the tree traversal: records the table
   * name of every [[UnresolvedRelation]] and returns each node unchanged
   * (the traversal is used purely for its collection side effect).
   */
  class Rule extends PartialFunction[LogicalPlan, LogicalPlan] {
    /** Table names collected so far; may contain duplicates — callers de-duplicate. */
    val list = new ListBuffer[String]

    // Defined for every node: the interesting cases are selected inside apply,
    // and the fall-through case is the identity.
    override def isDefinedAt(x: LogicalPlan): Boolean = true

    override def apply(v1: LogicalPlan): LogicalPlan = v1 match {
      case relation: UnresolvedRelation =>
        list += tableIdentifier(relation.tableIdentifier)
        relation
      case withNode: With =>
        // CTE definitions live in innerChildren, which transformDown skips;
        // recurse into each of them manually with this same rule.
        withNode.innerChildren.foreach(_.transformDown(this))
        withNode
      case plan => plan
    }
  }
}
