package hivetohbase_scala

import org.apache.spark.sql.SparkSession

object HiveUtils {

  /**
   * Builds (or retrieves the existing) [[SparkSession]] with Hive support enabled.
   *
   * Sets the JVM-wide `user.name` and `HADOOP_USER_NAME` system properties so that
   * subsequent file writes to the warehouse directory are performed as `sysUser`,
   * avoiding HDFS permission problems.
   *
   * NOTE(review): `HADOOP_USER_NAME` is only honored if it is set before any Hadoop
   * classes are initialized in this JVM — confirm this method is invoked early enough
   * in the application lifecycle.
   *
   * @param hiveUrl warehouse directory location, used as `spark.sql.warehouse.dir`
   * @param sysUser OS/Hadoop user identity to run as (system-property side effect)
   * @param master  Spark master URL; defaults to `"local[2]"` for local runs,
   *                pass e.g. `"spark://host:7077"` for a cluster
   * @return a [[SparkSession]] with Hive support enabled
   */
  def hiveConnect(hiveUrl: String, sysUser: String, master: String = "local[2]"): SparkSession = {
    // Align the effective user with the environment's hostname/user so later
    // warehouse writes succeed (permission workaround).
    System.setProperty("user.name", sysUser)
    System.setProperty("HADOOP_USER_NAME", sysUser)

    SparkSession
      .builder()
      .appName("sparkhive")
      .config("spark.sql.warehouse.dir", hiveUrl)
      .master(master)
      .enableHiveSupport()
      .getOrCreate()
  }
}
