package org.xi.maple.sink

import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.slf4j.{Logger, LoggerFactory}
import org.xi.maple.api.MapleSink
import org.xi.maple.sink.exception.HiveSinkException

import scala.collection.mutable

class Hive extends MapleSink[HiveConfig] {

  private val log: Logger = LoggerFactory.getLogger(classOf[Hive])

  /**
   * Validates that all required connector options are present.
   *
   * @return `(true, "")` when the config is valid; otherwise `(false, message)`
   *         where the message lists every missing option in the order checked.
   */
  override def checkConfig: (Boolean, String) = {
    // Collect every missing option first so the user can fix them all at once.
    // ListBuffer + append keeps the reported order identical to the check order
    // (the old MutableList with `+=:` prepended, reversing the order, and
    // MutableList itself is deprecated).
    val missingOptions = mutable.ListBuffer[String]()
    if (StringUtils.isBlank(config.getSourceTable)) {
      missingOptions += "sourceTable"
    }
    if (StringUtils.isBlank(config.getTargetDatabase)) {
      missingOptions += "targetDatabase"
    }
    if (StringUtils.isBlank(config.getTargetTable)) {
      missingOptions += "targetTable"
    }
    if (missingOptions.isEmpty) {
      (true, "")
    } else {
      (false, "please specify " + missingOptions.map(field => s"[$field]").mkString(", ") + " as non-empty string")
    }
  }

  /**
   * Writes the dataset into the target Hive table after verifying that the
   * source schema matches the target table's schema column by column.
   *
   * @param spark   active Spark session, used to resolve the target table schema
   * @param argsMap runtime arguments (unused here; kept for the MapleSink contract)
   * @param ds      the data to insert
   * @throws HiveSinkException if the column count, a column name, or a column
   *                           data type does not match the target table
   */
  def output(spark: SparkSession, argsMap: mutable.Map[String, String], ds: Dataset[Row]): Unit = {
    // checkConfig guarantees targetDatabase is non-blank, so address the table
    // by its fully-qualified name instead of depending on the session's current
    // database. NOTE(review): previously only getTargetTable was used and
    // targetDatabase was validated but ignored — confirm no caller passes an
    // already-qualified targetTable.
    val qualifiedTable = s"${config.getTargetDatabase}.${config.getTargetTable}"

    val sourceFields = ds.schema.fields
    val targetFields = spark.table(qualifiedTable).schema.fields

    if (targetFields.length != sourceFields.length) {
      throw new HiveSinkException(s"$qualifiedTable requires that the data to be inserted have the same number of columns as the target table: target table has ${targetFields.length} column(s) but the inserted data has ${sourceFields.length} column(s)")
    }
    // insertInto resolves columns by position, so names and types must line up
    // index by index. (1-based index in the message for readability.)
    for (i <- sourceFields.indices) {
      if (!targetFields(i).name.equals(sourceFields(i).name) || !targetFields(i).dataType.equals(sourceFields(i).dataType)) {
        throw new HiveSinkException(s"column ${i + 1}'s name or data type of the data to be inserted does not match target table")
      }
    }

    // Repartition only when explicitly requested; otherwise keep the dataset's
    // existing partitioning.
    val writer = if (config.getNumPartitions != null) {
      ds.repartition(config.getNumPartitions).write.mode(config.getSaveMode)
    } else {
      ds.write.mode(config.getSaveMode)
    }
    if (config.getOptions != null && !config.getOptions.isEmpty) {
      writer.options(config.getOptions)
    }
    log.info(s"InsertInto data to hive table: $qualifiedTable")
    writer.format("hive").insertInto(qualifiedTable)
  }
}
