package com.dataworker.spark.sql

import com.dataworker.spark.sql.kyuubi._
import com.dataworker.spark.sql.parser.DataworkSparkSqlParser
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSessionExtensions

/**
 * Entry point for wiring Datawork's custom SQL behavior into a Spark session.
 *
 * Registered via `spark.sql.extensions`; Spark invokes [[apply]] once per
 * session builder with the [[SparkSessionExtensions]] registry. Injects:
 *  - a custom SQL parser wrapping Spark's delegate parser,
 *  - two AQE query-stage preparation rules,
 *  - three post-hoc resolution rules,
 *  - a planner strategy (`LimitHivePartitionStrategy`).
 *
 * NOTE(review): rules registered at the same injection point run in
 * registration order, so the relative order below is intentional.
 */
class DataworkSqlExtensions extends (SparkSessionExtensions => Unit) with Logging {

  override def apply(extensions: SparkSessionExtensions): Unit = {
    // Custom SQL dialect: wrap whatever parser the session would otherwise use.
    extensions.injectParser((session, delegate) => new DataworkSparkSqlParser(session, delegate))

    // AQE query-stage preparation rules.
    // InsertShuffleNodeBeforeJoin ignores the session, hence the wildcard.
    extensions.injectQueryStagePrepRule(_ => InsertShuffleNodeBeforeJoin)
    extensions.injectQueryStagePrepRule(session => FinalStageConfigIsolation(session))

    // Post-hoc resolution rules, grouped together; registration order preserved
    // from the original wiring (clean-up rule first, then the rebalance rules).
    extensions.injectPostHocResolutionRule(FinalStageConfigIsolationCleanRule)
    extensions.injectPostHocResolutionRule(RebalanceBeforeWritingDatasource)
    extensions.injectPostHocResolutionRule(RebalanceBeforeWritingHive)

    // Planner strategy handling limited reads over Hive partitions.
    extensions.injectPlannerStrategy(LimitHivePartitionStrategy)
  }
}
