package com.ir.stat.spark.statistics

import org.apache.spark.SparkConf
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.CatalogStatistics
import org.json4s._
import org.json4s.jackson.Serialization.write
import org.slf4j.LoggerFactory

import scala.beans.BeanProperty
import scala.collection.mutable.ArrayBuffer
import scala.collection.{JavaConverters, mutable}

object StatisticsFacade {

  // Log category deliberately matches the bean class below so any existing
  // logback/log4j configuration keyed on the class name keeps working.
  val logger = LoggerFactory.getLogger(classOf[StatisticsFacade])

  // json4s formats used by `write(...)` in getAnalyzeResult and main.
  // Declared up front so the implicit is visible before its use sites.
  implicit val formats = DefaultFormats

  // One SparkSession per tenant key. mutable.HashMap is not thread-safe, so
  // all access is funnelled through `sessionCache.synchronized` in getSession.
  val sessionCache = new mutable.HashMap[String, SparkSession]

  /**
   * Returns the SparkSession cached for `key`, building it on first use.
   *
   * When the LOCAL env var is "true" the session uses a local master; otherwise
   * it targets YARN and requires SPARK_YARN_JARS to be set (SPARK_YARN_QUEUE
   * falls back to "default").
   *
   * @throws RuntimeException if running on YARN and SPARK_YARN_JARS is unset/empty
   */
  private[this] def getSession(key: String): SparkSession = sessionCache.synchronized {
    sessionCache.getOrElseUpdate(key, {
      val conf = new SparkConf().setAppName("spark-data-statistics-" + key)
      val local = System.getenv("LOCAL")
      if ("true".equalsIgnoreCase(local)) {
        conf.setMaster("local")
      } else {
        val jars = System.getenv("SPARK_YARN_JARS")
        if (jars == null || jars.isEmpty) {
          // Dump the environment to help diagnose the missing variable before failing.
          logger.info("#### SYSTEM ENV ####")
          sys.env.foreach { case (k, v) => logger.info(s"$k : $v") }
          throw new RuntimeException("SPARK_YARN_JARS can't be null!")
        }
        var queue = System.getenv("SPARK_YARN_QUEUE")
        if (queue == null || queue.isEmpty) {
          queue = "default"
          logger.info(s"using yarn default queue: $queue")
        }
        conf.setMaster("yarn")
          .set("spark.yarn.queue", queue)
          .set("spark.yarn.jars", jars)
          .set("spark.hadoop.yarn.timeline-service.enabled", "false")
      }
      SparkSession.builder().config(conf).getOrCreate()
    })
  }

  /**
   * Registers `table` as an external data-source table over `filePath` by
   * executing `CREATE TABLE ... USING <format> [OPTIONS ...] LOCATION ...`.
   *
   * `schema` (column name -> SQL type) and `options` may be null or empty, in
   * which case the corresponding clause is omitted.
   */
  private[this] def regist(table: String,
                           schema: mutable.Map[String, String],
                           filePath: String,
                           format: String,
                           options: mutable.Map[String, String],
                           session: SparkSession): Unit = {
    val sql = new ArrayBuffer[String]()
    sql.append("CREATE TABLE")
    sql.append(table)

    if (schema != null && schema.nonEmpty) {
      sql.append("(")
      sql.append(schema.map { case (f, t) => f + " " + t.toUpperCase }.mkString(", "))
      sql.append(")")
    }

    sql.append("USING")
    sql.append(format.toUpperCase())

    // Null-check added: the original dereferenced `options` unconditionally and
    // would NPE when a caller passes a null options map (schema already had a guard).
    if (options != null && options.nonEmpty) {
      sql.append("OPTIONS (")
      sql.append(options.map { case (k, v) => "'" + k + "'='" + v + "'" }.mkString(", "))
      sql.append(")")
    }

    sql.append("LOCATION")
    sql.append("\"" + filePath + "\"")

    val sqlText = sql.mkString(" ")
    session.sql(sqlText)
    logger.info("create table " + table + " sql with " + sqlText)
  }

  /**
   * Runs `ANALYZE TABLE ... COMPUTE STATISTICS` on `table`; when `deep` is true
   * and `columns` is non-empty, per-column statistics are also computed
   * (FOR COLUMNS clause, using the map's keys as column names).
   */
  private[this] def analyze(table: String,
                            columns: mutable.Map[String, String],
                            deep: Boolean,
                            session: SparkSession): Unit = {
    val sql = new ArrayBuffer[String]()
    sql.append("ANALYZE TABLE")
    sql.append(table)
    sql.append("COMPUTE STATISTICS")

    if (deep && columns != null && columns.nonEmpty) {
      sql.append("FOR COLUMNS")
      sql.append(columns.keys.mkString(","))
    }

    val sqlText = sql.mkString(" ")
    session.sql(sqlText)
    logger.info("analyze table " + table + " sql with " + sqlText)
  }

  /** True if `table` exists in the session's catalog. */
  private[this] def exist(table: String, session: SparkSession): Boolean =
    session.sessionState.catalog.tableExists(TableIdentifier(table))

  /** Drops `table` if present (`DROP TABLE IF EXISTS`). */
  private[this] def drop(table: String, session: SparkSession): Unit = {
    val sqlText = "DROP TABLE IF EXISTS " + table
    session.sql(sqlText)
    logger.info("drop table " + table + " sql with " + sqlText)
  }

  /**
   * Returns the catalog statistics recorded for `table`, or null when the table
   * metadata is missing or carries no statistics.
   *
   * Fix: the original called `meta.stats.get`, which throws
   * NoSuchElementException for a table that has never been analyzed; `orNull`
   * preserves the null contract that getAnalyzeResult depends on.
   */
  private[this] def getStat(table: String, session: SparkSession): CatalogStatistics = {
    val meta = session.sessionState.catalog.getTableMetadata(TableIdentifier(table))
    if (meta == null) null else meta.stats.orNull
  }

  /**
   * Public entry point: makes sure `table` is registered over `filePath`, then
   * computes its statistics.
   *
   * `_columns` and `_options` are Java maps supplied by the caller; both may be
   * null (the downstream helpers guard against null maps).
   */
  def doAnalyze(table: String, _columns: java.util.Map[String, String], filePath: String, format: String, _options: java.util.Map[String, String], deep: Boolean, tenant: String): Unit = {
    val session = getSession(tenant)
    // Explicit null guards so a null Java map stays null instead of risking an NPE.
    val columns = if (_columns == null) null else JavaConverters.mapAsScalaMapConverter(_columns).asScala
    val options = if (_options == null) null else JavaConverters.mapAsScalaMapConverter(_options).asScala
    if (!exist(table, session)) regist(table, columns, filePath, format, options, session)
    analyze(table, columns, deep, session)
  }

  /**
   * Returns the table's statistics serialized as JSON, or null when no
   * statistics are recorded for it.
   */
  def getAnalyzeResult(table: String, tenant: String): String = {
    val session = getSession(tenant)
    val meta = getStat(table, session)
    if (meta != null) write(meta) else null
  }

  /** Ad-hoc smoke test: registers a parquet table, analyzes it, prints the stats. */
  def main(args: Array[String]): Unit = {
    val schema: mutable.Map[String, String] = null
    val table = "test_table_" + System.currentTimeMillis()
    val options = mutable.Map("sep" -> ",")
    val session = getSession("test")
    regist(table, schema, "/tmp/collecter/c1/test_mysql/test_nokia123_test_mysql", "PARQUET", options, session)
    analyze(table, schema, false, session)
    val meta = getStat(table, session)
    println(write(meta))
  }
}

/**
 * Simple mutable bean (Java-style getters/setters via `@BeanProperty`); also
 * serves as the log category class for the companion object's logger.
 *
 * Fix: removed `override def toString: String = super.toString`, a no-op
 * override that merely delegated to the inherited default implementation.
 */
class StatisticsFacade(@BeanProperty var id: Int, @BeanProperty var name: String)