package org.xi.maple

import org.apache.commons.lang3.StringUtils
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.slf4j.{Logger, LoggerFactory}
import org.xi.maple.api._
import org.xi.maple.model.MapleData
import org.xi.maple.util.PluginUtil

import java.io.IOException
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import scala.collection.JavaConverters.mapAsScalaMapConverter
import scala.collection.mutable
import scala.util.{Failure, Success, Try}

object MapleApp {

  private val log: Logger = LoggerFactory.getLogger(MapleApp.getClass)

  /**
   * Entry point: parses the command line, loads the job configuration
   * (inline via the "data" argument, or from the path in "file"),
   * builds a SparkSession and runs the configured job.
   */
  def main(args: Array[String]): Unit = {
    val argsMap = ArgsParser.getParams(args)

    // Inline "data" takes precedence; getOrElse's second argument is by-name,
    // so the file is only read when "data" is absent.
    val config: String = argsMap.getOrElse("data", getContent(argsMap("file")))
    val mapleData = MapleData.getData(config)
    // Strip the config-source keys so only job parameters reach execution.
    argsMap.remove("data")
    argsMap.remove("file")

    val spark = SparkSession.builder().config(createSparkConf(mapleData)).getOrCreate()
    try {
      execute(spark, argsMap, mapleData)
    } finally {
      // Always release the session, even when execution fails.
      spark.close()
    }
  }

  /**
   * Reads the whole file at `path` into a UTF-8 string.
   *
   * @param path file-system path to the configuration file; must not be blank
   * @throws IOException if the file cannot be read
   */
  @throws[IOException]
  def getContent(path: String): String = {
    if (StringUtils.isBlank(path)) throw new ConfigRuntimeException("Path cannot be empty")
    // Decode with an explicit charset instead of the platform default,
    // so behavior does not vary across machines.
    new String(Files.readAllBytes(Paths.get(path)), StandardCharsets.UTF_8)
  }

  /**
   * Builds the SparkConf for the job: fixed app name and local master,
   * overlaid with the per-job env entries from the configuration.
   */
  private def createSparkConf(mapleData: MapleData): SparkConf = {
    val sparkConf = new SparkConf()
    // NOTE(review): master is hard-coded to local[*] — confirm this is intended
    // outside of local/testing deployments.
    sparkConf.setAppName("maple").setMaster("local[*]")
    // Job-level env entries can override/extend the defaults above.
    mapleData.getEnv.asScala.foreach { case (key, value) => sparkConf.set(key, value) }
    // NOTE(review): hard-coded Hadoop user — verify it should not come from config.
    System.setProperty("HADOOP_USER_NAME", "azkaban")
    sparkConf
  }

  /**
   * Instantiates all configured source, transform and sink plugins,
   * validates their configuration, then runs the pipeline.
   */
  def execute[SR <: SourceConfig, TR <: TransformConfig, SK <: SinkConfig](spark: SparkSession, argsMap: mutable.Map[String, String], mapleData: MapleData): Unit = {
    val sources = mapleData.getSources.map(source => PluginUtil.createSource[SR](source.getName, source.getConfig))
    val transformations = mapleData.getTransformations.map(transform => PluginUtil.createTransform[TR](transform.getName, transform.getConfig))
    val sinks = mapleData.getSinks.map(sink => PluginUtil.createSink[SK](sink.getName, sink.getConfig))

    checkConfig(sources, transformations, sinks)

    val execution = new MapleExecution(spark, argsMap)
    execution.execute(sources, transformations, sinks)
  }

  /**
   * Validates every plugin's configuration and rejects duplicate result
   * table names across sources and transforms. Exits the JVM with -1 on
   * the first validation failure.
   */
  private def checkConfig[SR <: SourceConfig, TR <: TransformConfig, SK <: SinkConfig](sources: Array[MapleSource[SR]], transformations: Array[MapleTransform[TR]], sinks: Array[MapleSink[SK]]): Unit = {
    // Result-table names seen so far; used to detect duplicates.
    val seenTables = mutable.Set[String]()

    // Registers a result table name; blank names are ignored.
    // mutable.Set#add returns false when the element was already present,
    // which is exactly the duplicate case.
    def checkTable(table: String): Unit = {
      if (StringUtils.isNotBlank(table) && !seenTables.add(table)) {
        log.error("Result table cannot be duplicate")
        System.exit(-1)
      }
    }

    def check[T <: MaplePlugin[_]](plugins: Array[T]): Unit = {
      for (plugin <- plugins) {
        plugin match {
          case source: MapleSource[_] => checkTable(source.getConfig.asInstanceOf[SourceConfig].getResultTable)
          case transform: MapleTransform[_] => checkTable(transform.getConfig.asInstanceOf[TransformConfig].getResultTable)
          case _ => // sinks produce no result table
        }
        // A plugin that throws during validation is treated as a failed check.
        val (success, msg) = Try(plugin.checkPluginConfig) match {
          case Success(result) => result
          case Failure(exception) => (false, exception.getMessage)
        }
        if (!success) {
          log.error(msg)
          System.exit(-1)
        }
      }
    }

    check(sources)
    check(transformations)
    check(sinks)
  }
}
