package com.zhang.sparksql_2

import org.apache.spark.sql.SparkSession

/**
 * @title:
 * @author: zhang
 * @date: 2022/2/19 09:36 
 */
object SparkSQL01_Env {

  /**
   * Entry point: builds a local SparkSession, reads `data/users.parquet`
   * with the default data source, prints its contents, and shuts the
   * session down.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    // todo Build the execution environment.
    // local[*] runs Spark locally using all available cores.
    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName("DataFrame")
      .getOrCreate()

    try {
      // `load` without an explicit format() uses the default data source,
      // which is Parquet (spark.sql.sources.default).
      spark.read.load("data/users.parquet").show()
    } finally {
      // todo Close the environment.
      // Always stop the session, even if the read fails, so the local
      // SparkContext and its UI port are released.
      spark.stop()
    }
  }
}
