package com.guchenbo.spark.sql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * Shared helpers for building [[org.apache.spark.sql.SparkSession]] instances,
 * plus small debugging/path utilities used by the examples in this module.
 *
 * @author guchenbo
 * @date 2021/12/28
 */
object SparkUtils {

  /** Default Hive metastore thrift URI used by [[sparkSessionTd]]. */
  private val DefaultMetastoreUris = "thrift://ark150:9083"

  /**
   * Builds (or reuses) a local-mode [[SparkSession]] using all available cores.
   *
   * @param appName application name shown in the Spark UI
   * @return the shared SparkSession for this JVM
   */
  def sparkSession(appName: String): SparkSession =
    SparkSession.builder()
      .master("local[*]")
      .appName(appName)
      .getOrCreate()

  /**
   * Builds (or reuses) a Hive-enabled local-mode [[SparkSession]].
   *
   * The metastore URI and master are now parameters (with defaults matching the
   * previous hard-coded values) so other environments can reuse this helper
   * without editing it — existing zero-extra-arg callers are unaffected.
   *
   * @param appName       application name shown in the Spark UI
   * @param metastoreUris Hive metastore thrift URI(s); defaults to the test cluster
   * @param master        Spark master; defaults to 4 local cores
   * @return the shared Hive-enabled SparkSession for this JVM
   */
  def sparkSessionTd(appName: String,
                     metastoreUris: String = DefaultMetastoreUris,
                     master: String = "local[4]"): SparkSession =
    SparkSession.builder()
      .appName(appName)
      .master(master)
      .enableHiveSupport()
      .config("hive.metastore.uris", metastoreUris)
      .config("hive.exec.scratchdir", "/tmp/hive")
      .getOrCreate()

  /**
   * Debug helper: prints the partition count and the partition objects of
   * `rdd` to stdout.
   */
  def printPart(rdd: RDD[_]): Unit =
    println(s"part len ${rdd.getNumPartitions}, parts is ${rdd.partitions.mkString("Array(", ", ", ")")}")

  /**
   * Resolves a resource name to its path under this module's resources folder.
   *
   * NOTE(review): this is a path relative to the working directory — it assumes
   * the JVM is launched from the repository root; confirm against callers.
   *
   * @param path resource file name (may include subdirectories)
   * @return relative path `spark-sql/src/main/resources/<path>`
   */
  def classpath(path: String): String =
    s"spark-sql/src/main/resources/$path"
}
