package com.ywps.vaas.framework.app

import com.ywps.vaas.framework.util.{EnvUtil, HiveUtil}
import org.apache.spark.sql.SparkSession
import org.slf4j.LoggerFactory

import scala.util.control.NonFatal

/**
 * Application-layer interface: provides the Spark bootstrap/teardown
 * lifecycle around controller-layer scheduling logic.
 */

trait TApplication {

  /** Logger bound to the concrete implementing class so log origins are accurate. */
  val logger: org.slf4j.Logger = LoggerFactory.getLogger(this.getClass)

  /**
   * Builds a Hive-enabled SparkSession, publishes it via [[EnvUtil]], runs the
   * supplied controller logic, and always releases the session afterwards.
   *
   * @param master Spark master URL (defaults to local mode using all cores)
   * @param app    application name shown in the Spark UI (defaults to the
   *               implementing class's simple name)
   * @param op     controller-layer scheduling logic, evaluated lazily inside
   *               the try block so failures are logged and resources released
   */
  def start(master: String = "local[*]", app: String = this.getClass.getSimpleName)
           (op: => Unit): Unit = {
    // 1. Set the Hadoop user before the session is created so that HDFS/Hive
    //    operations run as "root".
    System.setProperty("HADOOP_USER_NAME", "root")
    val sparkSession = SparkSession.builder().master(master).appName(app)
      // Hive metastore thrift endpoint (default port 9083; see hive-site.xml).
      .config("hive.metastore.uris", HiveUtil.getHiveMetastoreUris())
      // Hive warehouse directory used by Spark SQL.
      .config("spark.sql.warehouse.dir", HiveUtil.getSparkSqlWarehouseDir())
      .config("spark.debug.maxToStringFields", "1500")
      // NOTE(review): "1024g" effectively disables the driver result-size
      // guard — confirm this is intentional for these workloads.
      .config("spark.driver.maxResultSize", "1024g")
      .enableHiveSupport().getOrCreate()
    // Publish the session so lower layers can retrieve it without explicit wiring.
    EnvUtil.put(sparkSession)
    // 2. Run the controller-layer scheduling logic.
    try {
      op
    } catch {
      // Catch only non-fatal errors so OutOfMemoryError, InterruptedException,
      // etc. still propagate. Pass the throwable to the logger so the full
      // stack trace is recorded (printStackTrace() returns Unit and would have
      // logged the literal string "()").
      case NonFatal(ex) => logger.error("application execution failed", ex)
    } finally {
      // 3. Always release resources, even when op fails.
      sparkSession.close()
      EnvUtil.remove()
    }
  }
}
