package com.ywps.vaas.framework.app

import com.ywps.vaas.framework.util.HiveUtil
import org.apache.spark.sql.SparkSession

/**
 * Mixin that supplies a Hive-enabled, local-mode [[SparkSession]] for tests.
 *
 * Note: this is an eager `val`, so the session (and the JVM-wide
 * `HADOOP_USER_NAME` property) is initialized as soon as the trait is mixed in.
 */
trait ISparkSessionTest {
  val spark: SparkSession = {
    // Impersonate "root" so writes to HDFS/Hive from a dev machine are permitted.
    System.setProperty("HADOOP_USER_NAME", "root")

    val base = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getSimpleName)

    val configured = base
      // Hive metastore thrift URI (default port 9083; see hive-site.xml).
      .config("hive.metastore.uris", HiveUtil.getHiveMetastoreUris())
      // Hive warehouse directory used by Spark SQL.
      .config("spark.sql.warehouse.dir", HiveUtil.getSparkSqlWarehouseDir())
      // Allow very wide schemas in debug string output.
      .config("spark.debug.maxToStringFields", "1500")
      // NOTE(review): "1024g" likely intends "no practical limit" on results
      // collected to the driver — confirm; Spark also accepts "0" for unlimited.
      .config("spark.driver.maxResultSize", "1024g")

    configured
      .enableHiveSupport()
      .getOrCreate()
  }
}
