

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

/**
 * @Author: Cheng 
 * @Date: 2023/12/19 15:34
 */
/**
 * Central holder for Spark configuration constants and session creation.
 *
 * Dataset paths and JDBC settings are compile-time constants; switch between
 * local and cluster values by editing the definitions below (the commented-out
 * lines hold the cluster variants).
 */
object SparkUtils {
  // Cluster (HDFS) dataset paths — uncomment when running against the cluster.
//  val usersPath = "hdfs://node1:8020/users_dataset.csv"
//  val stationsPath = "hdfs://node1:8020/stations_dataset.csv"
//  val chargingPath = "hdfs://node1:8020/charging_record_dataset.csv"
  // Local dataset paths used with master("local[2]").
  // These are constants, never reassigned — declared as val, not var.
  val usersPath = "data/users_dataset.csv"
  val stationsPath = "data/stations_dataset1.csv"
  val chargingPath = "data/hoteldata.csv"

  // JDBC connection settings for the MySQL sink.
  // NOTE(review): credentials are hard-coded in source; consider moving them
  // to an external config file or environment variables.
  val url = "jdbc:mysql://localhost:3306/wu?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=UTC"
  val password = "root"

  /**
   * Builds — or returns the already-created — application SparkSession.
   *
   * `getOrCreate()` ensures repeated calls share one session, so this is safe
   * to call from multiple places.
   *
   * @return the shared [[SparkSession]], with log level lowered to WARN
   */
  def getSparkSession(): SparkSession = {
    val spark = SparkSession.builder()
      .appName("charging-station")
      .master("local[2]")
//      .master("spark://node1:7077")   // cluster master — uncomment for deployment
      .getOrCreate()
    // Reduce console noise: only WARN and above are logged.
    spark.sparkContext.setLogLevel("WARN")
    spark
  }

  /** Convenience accessor for the [[SparkContext]] of the shared session. */
  def getSparkContext: SparkContext = {
    getSparkSession().sparkContext
  }

  /** Smoke test entry point: create the session and print it (with newline). */
  def main(args: Array[String]): Unit = {
    // println (not print) so the output is terminated by a newline.
    println(getSparkSession())
  }
}
