package org.xi.maple.transform

import com.alibaba.fastjson.JSON
import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.{DataFrameReader, Dataset, Row, SparkSession}
import org.slf4j.{Logger, LoggerFactory}
import org.xi.maple.api.MapleTransform
import org.xi.maple.transform.model.NamedDatasource

import java.nio.file.{Files, Paths}
import java.util.UUID
import scala.collection.JavaConverters._
import scala.collection.mutable

class Sql extends MapleTransform[SqlConfig] {

  private val log: Logger = LoggerFactory.getLogger(classOf[Sql])

  // Set by prepare() when the whole query targets exactly one JDBC datasource
  // and has already been pushed down; process() then just returns resultDs.
  private var sqlFinished = false
  private var resultDs: Option[Dataset[Row]] = None

  /**
   * Produces the result of the configured SQL.
   *
   * If prepare() already pushed the whole query down to a single remote JDBC
   * datasource, the pre-loaded Dataset is returned; otherwise the (possibly
   * rewritten) SQL is executed by Spark. The result is optionally persisted
   * (when `cache` is configured) and shown before being returned.
   *
   * @param spark   active Spark session
   * @param argsMap job arguments (unused here)
   * @param ds      upstream dataset (unused: this transform sources its own data)
   * @return the query result
   */
  override def process(spark: SparkSession, argsMap: mutable.Map[String, String], ds: Dataset[Row]): Dataset[Row] = {
    val result = if (sqlFinished) {
      // Whole query was already executed on the remote datasource in prepare().
      resultDs.get
    } else {
      log.info(s"Load data from query: ${config.getSql}")
      spark.sql(config.getSql)
    }
    // TODO: needs optimization — unconditional show() and eager persist()
    // may be wasteful for large results.
    if (config.getCache) {
      result.persist()
    }
    result.show()
    result
  }

  /**
   * Validates the plugin configuration: a non-blank `sql` is mandatory.
   *
   * @return (true, "") when valid, otherwise (false, reason)
   */
  override def checkConfig: (Boolean, String) = {
    if (StringUtils.isNotBlank(config.getSql)) {
      (true, "")
    } else {
      (false, "please specify [sql]")
    }
  }

  /**
   * Resolves every table referenced by the SQL.
   *
   * Tables named "&lt;datasource&gt;__&lt;table&gt;" live in an external JDBC
   * datasource. When ALL referenced tables belong to one datasource, the whole
   * query is pushed down to that database. Otherwise each external table is
   * loaded via JDBC, registered as a Spark temp view under a random alias, and
   * the SQL is rewritten to reference the views.
   *
   * @param spark   active Spark session
   * @param argsMap job arguments; "datasource-path" points to the JSON file of
   *                datasource definitions
   */
  override def prepare(spark: SparkSession, argsMap: mutable.Map[String, String]): Unit = {
    Sql.initDatasourceMap(argsMap("datasource-path"))

    // Collect every table the query references.
    val logicalPlan = spark.sessionState.sqlParser.parsePlan(config.getSql)
    val tables = logicalPlan.collect { case r: UnresolvedRelation => r.tableName }

    val datasourceSet: mutable.Set[String] = mutable.Set[String]()
    tables.foreach { table =>
      datasourceSet.add(getDatasourceNameFromTable(table))
    }

    if (datasourceSet.size == 1) {
      // Single datasource: push the entire query down to the remote database.
      log.info("Database in this sql is unique, read directly")
      val datasourceName = datasourceSet.head
      if (StringUtils.isNotBlank(datasourceName)) {
        val datasourcePrefix = s"${datasourceName}__"
        // Strip the literal "<datasource>__" prefix. Bugfix: use replace (a
        // literal substitution) instead of replaceAll, so the prefix is never
        // interpreted as a regular expression.
        resultDs = Some(getReader(spark, datasourceName).option("query", config.getSql.replace(datasourcePrefix, "")).load())
        sqlFinished = true
      }
    } else {
      // Multiple datasources: register each external table as a temp view and
      // rewrite the query to reference the views.
      tables.foreach { table =>
        if (Sql.TABLE_IN_SQL_MAP.contains(table)) {
          // Table was registered by a previous call; only rewrite the query.
          val tableAliasName = Sql.TABLE_IN_SQL_MAP(table)
          if (StringUtils.isNotBlank(tableAliasName)) {
            // Bugfix: use the boundary-aware rewriter instead of a raw regex
            // replaceAll, consistent with the first-registration branch below.
            config.setSql(Sql.replaceTableAliasName(config.getSql, table, tableAliasName))
          }
        } else {
          val datasourceName = getDatasourceNameFromTable(table)
          if (StringUtils.isNotBlank(datasourceName)) {
            val datasourcePrefix = s"${datasourceName}__"
            val tableAliasName = table + "_" + Sql.randomTableName
            log.info(s"Register table $tableAliasName from $table")
            // Bugfix: the remote table is `table` with the datasource prefix
            // stripped — the alias (which carries a random suffix) does not
            // exist on the remote side.
            val df = getReader(spark, datasourceName).option("dbtable", table.replace(datasourcePrefix, "")).load()
            df.createOrReplaceTempView(tableAliasName)
            // Bugfix: removed a stray extra closing parenthesis that made this
            // statement a syntax error.
            config.setSql(Sql.replaceTableAliasName(config.getSql, table, tableAliasName))
            Sql.TABLE_IN_SQL_MAP.put(table, tableAliasName)
          } else {
            // No "<datasource>__" prefix: a native Spark table, nothing to do.
            Sql.TABLE_IN_SQL_MAP.put(table, "")
          }
        }
      }
    }
  }

  /**
   * Extracts the datasource name from a table of the form
   * "&lt;datasource&gt;__&lt;table&gt;"; returns "" when the table does not
   * follow that convention.
   */
  def getDatasourceNameFromTable(tableName: String): String = {
    val names = tableName.split("__")
    if (names.length == 2) names(0) else ""
  }

  /**
   * Builds a JDBC DataFrameReader for the named datasource, applying any
   * extra options first so that url/driver/user/password always win.
   */
  def getReader(spark: SparkSession, datasourceName: String): DataFrameReader = {
    val datasource = Sql.DATASOURCE_MAP(datasourceName)
    val reader = spark.read.format("jdbc")

    if (datasource.getOptions != null && !datasource.getOptions.isEmpty) {
      reader.options(datasource.getOptions)
    }
    reader
      .option("url", datasource.getUrl)
      .option("driver", datasource.getDriver)
      .option("user", datasource.getUser)
      .option("password", datasource.getPassword)

    reader
  }
}

object Sql {
  // Maps original table name -> registered temp-view alias. An empty value
  // means the table is a native Spark table and needs no registration.
  val TABLE_IN_SQL_MAP: mutable.Map[String, String] = mutable.Map[String, String]()
  private var DATASOURCE_MAP_INITIALIZED = false
  // Maps datasource name -> its JDBC connection definition.
  val DATASOURCE_MAP: mutable.Map[String, NamedDatasource] = mutable.Map[String, NamedDatasource]()

  /**
   * Loads datasource definitions from the JSON file at `path` into
   * DATASOURCE_MAP. Idempotent: only the first call has any effect; a blank
   * path marks initialization done without loading anything.
   *
   * NOTE(review): not thread-safe — assumed to run on the single driver
   * thread during prepare(); confirm before calling concurrently.
   */
  def initDatasourceMap(path: String): Unit = {
    if (DATASOURCE_MAP_INITIALIZED) return
    if (StringUtils.isBlank(path)) {
      DATASOURCE_MAP_INITIALIZED = true
      return
    }
    val filePath = Paths.get(path)
    val bytes = Files.readAllBytes(filePath)
    val value = JSON.parseObject(new String(bytes))
    for (key <- value.keySet().asScala) {
      DATASOURCE_MAP.put(key, value.getObject(key, classOf[NamedDatasource]))
    }
    DATASOURCE_MAP_INITIALIZED = true
  }

  /**
   * Random suffix for temp-view names.
   *
   * Bugfix: the previous `substring(8)` kept the UUID's hyphens, which are
   * invalid in an unquoted SQL identifier; strip them instead.
   */
  def randomTableName: String = {
    UUID.randomUUID().toString.replace("-", "")
  }

  /**
   * Rewrites every occurrence of `tableName` in `query` to `tableAliasName`,
   * covering the backtick-quoting variants of a "db.table" reference. Matches
   * are bounded by whitespace (via look-around) so a longer identifier that
   * merely contains the name is not clobbered.
   *
   * @param query          the SQL to rewrite
   * @param tableName      the original table reference ("db.table" or "table")
   * @param tableAliasName the replacement identifier
   * @return the rewritten SQL
   */
  def replaceTableAliasName(query: String, tableName: String, tableAliasName: String): String = {
    val names = tableName.split("\\.")
    if (names.length == 2) {
      val (db, tb) = (names(0), names(1))
      query.replaceAll(s"(?<!\\S)$db\\.$tb(?!\\S)", tableAliasName)
        .replaceAll(s"`$db`\\.`$tb`", tableAliasName)
        .replaceAll(s"(?<!\\S)$db\\.`$tb`", tableAliasName)
        .replaceAll(s"`$db`\\.$tb(?!\\S)", tableAliasName)
    } else {
      // Bugfix: an unqualified table name previously matched nothing, because
      // every pattern required a "db." prefix that was empty here.
      query.replaceAll(s"(?<!\\S)$tableName(?!\\S)", tableAliasName)
        .replaceAll(s"`$tableName`", tableAliasName)
    }
  }
}