package com.gitee.dufafei.spark.listener

import org.apache.spark.Dependency
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.catalyst.plans.logical.{Deduplicate, GlobalLimit, InsertIntoTable, Join, LocalLimit, LogicalPlan, Project, Union}
import org.apache.spark.sql.execution.QueryExecution
import org.apache.spark.sql.execution.command.CreateViewCommand
import org.apache.spark.sql.execution.datasources.{CreateTable, HadoopFsRelation, LogicalRelation, SaveIntoDataSourceCommand}
import org.apache.spark.sql.execution.datasources.jdbc.{JDBCRelation, JdbcRelationProvider}
import org.apache.spark.sql.execution.streaming.{ConsoleRelation, ConsoleSinkProvider}

import scala.collection.mutable

/**
 * Extracts table-level lineage from a Spark SQL logical plan.
 *
 * Input tables are collected from:
 *  - UnresolvedRelation (catalog tables referenced by name)
 *  - JDBCRelation sources
 * Output tables are collected from:
 *  - CreateTable
 *  - CreateViewCommand
 *  - InsertIntoTable
 *  - SaveIntoDataSourceCommand (JDBC sinks)
 */
class ISparkSqlParser(qe: QueryExecution) {

  /** The session catalog's current database; used when a table identifier omits its database. */
  def getCurrentDB: String = qe.sparkSession.sessionState.catalog.getCurrentDatabase

  /**
   * Walks the query's logical plan and collects lineage.
   *
   * @return a pair of (input tables, output tables) discovered in the plan
   */
  def resolveLogicPlan(): (mutable.Set[TableVertex], mutable.Set[TableVertex]) = {
    val inputTables = mutable.Set[TableVertex]()
    val outputTables = mutable.Set[TableVertex]()
    resolveLogic(qe.logical, inputTables, outputTables)
    (inputTables, outputTables)
  }

  /**
   * Recursively resolves one logical plan node, adding discovered source tables
   * to `inputTables` and sink tables to `outputTables`.
   *
   * Unrecognized node, relation, or sink types are ignored (logged for plan
   * nodes) instead of raising a MatchError, so the walk never fails the query.
   */
  def resolveLogic(plan: LogicalPlan,
                   inputTables: mutable.Set[TableVertex],
                   outputTables: mutable.Set[TableVertex]): Unit = {
    plan match {
      case p: Project =>
        resolveLogic(p.child, inputTables, outputTables)
      case p: UnresolvedRelation =>
        val db = p.tableIdentifier.database.getOrElse(getCurrentDB)
        inputTables.add(TableVertex("", db, p.tableIdentifier.table, Map()))
      case p: CreateTable =>
        // CTAS: the SELECT part (if any) contributes the input tables.
        p.query.foreach(resolveLogic(_, inputTables, outputTables))
        val id = p.tableDesc.identifier
        val db = id.database.getOrElse(getCurrentDB)
        outputTables.add(TableVertex("", db, id.table, Map()))
      case p: CreateViewCommand =>
        outputTables.add(TableVertex("view", "", p.name.table, Map()))
        resolveLogic(p.child, inputTables, outputTables)
      case p: InsertIntoTable =>
        // The insert target is itself a relation node; swapping the accumulators
        // makes the relation handler record it as an OUTPUT table.
        resolveLogic(p.table, outputTables, inputTables)
        resolveLogic(p.query, inputTables, outputTables)
      case p: LogicalRelation => p.relation match {
        case _: ConsoleRelation => // console source: no table to record
        case _: HadoopFsRelation => // file-based source: no catalog table name here
        case r: JDBCRelation =>
          // A parenthesized value is an inline subquery, not a plain table name;
          // only plain table names are recorded as lineage.
          val tableOrQuery = r.jdbcOptions.tableOrQuery
          if (!isQuery(tableOrQuery)) {
            inputTables.add(TableVertex("", "", tableOrQuery, Map()))
          }
        case _ => // other relation types: ignore (previously a scala.MatchError)
      }
      case p: SaveIntoDataSourceCommand =>
        p.dataSource match {
          case _: ConsoleSinkProvider => // console sink: no table to record
          case _: JdbcRelationProvider =>
            // "dbtable" is a mandatory option for JDBC writes.
            outputTables.add(TableVertex("", "", p.options("dbtable"), Map()))
          case _ => // other sink providers: ignore (previously a scala.MatchError)
        }
        resolveLogic(p.query, inputTables, outputTables)
      case p: Join =>
        resolveLogic(p.left, inputTables, outputTables)
        resolveLogic(p.right, inputTables, outputTables)
      case p: Union =>
        p.children.foreach(resolveLogic(_, inputTables, outputTables))
      case p: Deduplicate => // dropDuplicates
        resolveLogic(p.child, inputTables, outputTables)
      case p: LocalLimit =>
        resolveLogic(p.child, inputTables, outputTables)
      case p: GlobalLimit =>
        resolveLogic(p.child, inputTables, outputTables)
      case other =>
        // Unknown node type: log its class so a missing case can be added later.
        println("******child plan******:\n" + other.getClass.getName)
    }
  }

  /**
   * Checks whether `rdd1` is `rdd2` itself or an ancestor of `rdd2` in the
   * RDD dependency DAG.
   *
   * Why this exists: some sources resolve to a LogicalRDD, whose table name
   * cannot be read from the plan directly. Example (pseudo-code):
   *   schema = StructType([StructField('id', IntegerType(), True), ...])
   *   rdd    = sc.textFile('/path').map(r -> r.split(',')).map(p -> Row(p[0], p[1]))
   *   df     = spark.createDataFrame(rdd, schema)
   *   df.createOrReplaceTempView('tdl_spark_test')
   *   spark.sql('create table tdl_file_test as select ... from tdl_spark_test')
   * Approach:
   *   1. record the RDD produced by textFile when it is created
   *   2. while parsing the DataFrame's logical plan, fetch its RDD
   *   3. test whether the recorded RDD is an ancestor of the current one
   *   4. if so, attribute the recorded RDD's table name to this plan
   */
  def checkRddRelationShip(rdd1: RDD[_], rdd2: RDD[_]): Boolean =
    rdd1.id == rdd2.id || dfsSearch(rdd1, rdd2.dependencies)

  /**
   * Depth-first search over a dependency list for an RDD whose id matches
   * `rdd1`'s. RDD lineages are acyclic, so no visited-set is needed.
   */
  def dfsSearch(rdd1: RDD[_], dependencies: Seq[Dependency[_]]): Boolean =
    dependencies.exists { dependency =>
      dependency.rdd.id == rdd1.id || dfsSearch(rdd1, dependency.rdd.dependencies)
    }

  /** True when the JDBC `dbtable` option holds an inline subquery rather than a plain table name. */
  def isQuery(tableOrQuery: String): Boolean = tableOrQuery.contains("(")

  /**
   * Strips the outermost parentheses from an inline subquery.
   * Precondition: `isQuery(tableOrQuery)` — otherwise the indices are invalid.
   */
  def getQuery(tableOrQuery: String): String = {
    val left = tableOrQuery.indexOf("(")
    val right = tableOrQuery.lastIndexOf(")")
    tableOrQuery.substring(left + 1, right)
  }
}
