package org.xi.maple

import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.xi.maple.api.{ConfigRuntimeException, _}

import scala.collection.mutable

/**
 * Drives a source -> transform -> sink pipeline on a single SparkSession.
 *
 * Execution order: every source is prepared and registered as a temp view,
 * then each transformation is applied in sequence (threading the running
 * Dataset through), and finally every sink consumes the resulting Dataset
 * (or an explicitly named source table).
 *
 * @param spark   the session used to read tables and register temp views
 * @param argsMap runtime arguments forwarded to every plugin's prepare/process/output
 */
class MapleExecution(spark: SparkSession, argsMap: mutable.Map[String, String]) {

  /**
   * Runs the full pipeline. Any of the three arrays may be null or empty;
   * the corresponding stage is simply skipped.
   *
   * @param sources         data producers; each must declare a result table name
   * @param transformations applied in array order, each receiving the previous stage's Dataset
   * @param sinks           data consumers; each reads either its configured source table or the final Dataset
   * @throws ConfigRuntimeException if a source has no result table configured
   */
  def execute[SR <: SourceConfig, TR <: TransformConfig, SK <: SinkConfig]
  (sources: Array[MapleSource[SR]], transformations: Array[MapleTransform[TR]], sinks: Array[MapleSink[SK]]): Unit = {
    if (sources != null && sources.nonEmpty) {
      for (source <- sources) {
        source.prepare(spark, argsMap)
        registerSourceTempView(source)
      }
    }

    // Seed the pipeline with the first source's data.
    // Fix: guard against null `sources` here too — the original checked null
    // in the loop above but then dereferenced `sources` unconditionally,
    // throwing an NPE when execute(null, ...) was called.
    var ds: Dataset[Row] =
      if (sources != null && sources.nonEmpty) sources(0).getData(spark, argsMap) else null

    if (transformations != null && transformations.nonEmpty) {
      for (transformation <- transformations) {
        transformation.prepare(spark, argsMap)
        ds = transformProcess(transformation, ds)
        // Expose the intermediate result to later stages that reference it by table name.
        registerTransformTempView(transformation, ds)
      }
    }

    if (sinks != null && sinks.nonEmpty) {
      for (sink <- sinks) {
        sink.prepare(spark, argsMap)
        sinkProcess(sink, ds)
      }
    }
  }

  /**
   * Registers the source's data as a temp view under its configured result table name.
   * A blank result table is a configuration error: downstream stages would have no
   * way to reference the source's output.
   */
  private def registerSourceTempView[T <: SourceConfig](source: MapleSource[T]): Unit = {
    val resultTable = source.getConfig.getResultTable
    if (StringUtils.isBlank(resultTable)) {
      throw new ConfigRuntimeException("Plugin[" + source.getClass.getName + "] must be registered as dataset/table, please set \"result_table_name\" config")
    } else {
      val ds = source.getData(spark, argsMap)
      // getData may legitimately return null (e.g. a side-effect-only source); skip registration then.
      if (ds != null) ds.createOrReplaceTempView(resultTable)
    }
  }

  /**
   * Applies one transformation. If the transform names an explicit source table,
   * that table is read instead of the Dataset threaded from the previous stage.
   */
  private def transformProcess[T <: TransformConfig](transform: MapleTransform[T], ds: Dataset[Row]): Dataset[Row] = {
    val sourceTable = transform.getConfig.getSourceTable
    val fromDs = if (StringUtils.isBlank(sourceTable)) ds else spark.read.table(sourceTable)
    transform.process(spark, argsMap, fromDs)
  }

  /**
   * Registers a transformation's output as a temp view when a result table is
   * configured. Unlike sources, a missing result table is allowed here — the
   * Dataset is still threaded to the next stage directly.
   */
  private def registerTransformTempView[T <: TransformConfig](transform: MapleTransform[T], ds: Dataset[Row]): Unit = {
    val resultTable = transform.getConfig.getResultTable
    if (StringUtils.isNotBlank(resultTable) && ds != null) {
      ds.createOrReplaceTempView(resultTable)
    }
  }

  /**
   * Sends data to one sink: either the sink's explicitly configured source table,
   * or the final Dataset produced by the transformation chain.
   */
  private def sinkProcess[T <: SinkConfig](sink: MapleSink[T], ds: Dataset[Row]): Unit = {
    val sourceTable = sink.getConfig.getSourceTable
    val fromDs = if (StringUtils.isBlank(sourceTable)) ds else spark.read.table(sourceTable)
    sink.output(spark, argsMap, fromDs)
  }
}
