package com.wubai.spark.security.bigdata

import com.wubai.spark.security.hadoop.LoginUtil
import org.apache.log4j.Logger
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.storage.StorageLevel

import java.io.File
import scala.io.Source

/**
 * Entry point that executes a spark-sql script file, submitted via
 * spark-submit (driver in cluster mode) after a Kerberos keytab login.
 *
 * Expected arguments:
 *   --userPrincipal=...  Kerberos principal used for login
 *   --sparkSqlPath=...   path of the SQL script (base name also used as app name)
 *   --params=k1=v1,k2=v2 optional variable bindings substituted into the script
 *
 * @author 500佰
 */

object ExecSqlc {
  val logger = LoginUtil.LOG

  /**
   * Reads a SQL script file and substitutes `$name`-style variables.
   *
   * @param sparkSqlPath local filesystem path of the SQL script
   * @param variables    map of variable name -> replacement value; every
   *                     literal occurrence of `$name` in a line is replaced
   * @param sparkSession unused; kept for source compatibility with existing callers
   * @return file content with all variables substituted, lines joined by '\n'
   */
  def readSparkSQLFile(sparkSqlPath: String, variables: Map[String, String], sparkSession: SparkSession): String = {
    val source = Source.fromFile(sparkSqlPath) // driver client model
    try {
      source.getLines().map { line =>
        variables.foldLeft(line) { case (acc, (name, value)) =>
          // Literal (non-regex) replacement of "$name" tokens.
          acc.replaceAllLiterally(s"$$$name", value)
        }
      }.mkString("\n")
    } finally {
      source.close() // always release the file handle
    }
  }

  def main(args: Array[String]): Unit = {
    // Intentionally shadows the object-level logger: main logs under this class's name.
    val logger = Logger.getLogger(getClass.getName)
    // Parse "--key=value" style arguments into a map.
    val paramMap = args.map(_.split("=", 2)).collect {
      case Array(key, value) if key.startsWith("--") => key.stripPrefix("--") -> value.trim
    }.toMap
    val userPrincipal = paramMap.getOrElse("userPrincipal", "")
    val sparkSqlPath = paramMap.getOrElse("sparkSqlPath", "")

    // In cluster mode the distributed script lands in the container working directory.
    val filePath = System.getProperty("user.dir") + File.separator + sparkSqlPath.split("/").last
    // Parse "k1=v1,k2=v2" variable bindings. Tolerates an absent/empty
    // "params" argument and values containing '=': the original
    // arr(0) -> arr(1) threw ArrayIndexOutOfBoundsException on "".
    val params: Map[String, String] = paramMap.getOrElse("params", "")
      .split(",")
      .map(_.split("=", 2))
      .collect { case Array(k, v) if k.nonEmpty => k -> v }
      .toMap

    LoginUtil.loginWithUserKeytab(userPrincipal)
    logger.info("认证成功！")
    // Spark application name derives from the SQL file's base name (no extension).
    val appName = sparkSqlPath.split("/").last.split("\\.")(0)
    val spark = SparkSession
      .builder()
      .appName(s"ExecUserSqlc-${appName}")
      .getOrCreate()
    try {
      // sparkSql content with variables substituted
      val sqlContent = readSparkSQLFile(filePath, params, spark)
      val sqlc: String = sqlContent.stripMargin // format
      logger.info(s"执行语句：${sqlc}")
      logger.info(s"开始执行：${sparkSqlPath}")
      // Split on ';' and drop blank fragments: trailing semicolons or blank
      // lines previously produced empty statements that made spark.sql fail.
      val sqlStatements: Array[String] = sqlc.split(";").map(_.trim).filter(_.nonEmpty)
      // Execute the statements sequentially, in script order.
      for (statement <- sqlStatements) {
        val result: DataFrame = spark.sql(statement).distinct().persist(StorageLevel.MEMORY_AND_DISK)
        // show() returns Unit, so it must not be interpolated into the log
        // message (the original logged "执行结果：()" every time).
        logger.info("执行结果（前2行）：")
        result.show(2)
        // Release the cached blocks before the next statement so persisted
        // results do not accumulate across the whole script.
        result.unpersist()
      }
    } finally {
      // Stop the session even if a statement fails.
      spark.stop()
    }
  }
}
