package com.jscloud.sparksql

import org.apache.spark.sql.SparkSession

object Demo4forparquet {

  /**
   * Demo: read the sample `users.parquet` file from the classpath,
   * print its schema and contents, then stop the Spark session.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Create the SparkSession (local mode, Kryo serialization).
    val spark = SparkSession.builder()
      // Keep the application name consistent with the object name
      // (was "demo3", copied from a previous demo).
      .appName("Demo4forparquet")
      /*
      Sets a config option. Options set using this method are automatically
      propagated to both SparkConf and SparkSession's own configuration.
      Since: 2.0.0
       */
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .master("local[*]")
      .getOrCreate()

    // Reduce log noise; setLogLevel upper-cases the argument internally.
    spark.sparkContext.setLogLevel("warn")

    try {
      // The file comes from:
      // spark-3.1.3-bin-hadoop3.2/examples/src/main/resources/users.parquet
      // getResource returns null when the resource is absent from the
      // classpath; fail with a clear message instead of an opaque NPE.
      val usersPath = Option(this.getClass.getClassLoader.getResource("users.parquet"))
        .map(_.getPath)
        .getOrElse(sys.error("resource 'users.parquet' not found on the classpath"))

      val usersDF = spark.read.parquet(usersPath)

      // Print schema information.
      usersDF.printSchema()

      /**
       * root
       * |-- name: string (nullable = true)
       * |-- favorite_color: string (nullable = true)
       * |-- favorite_numbers: array (nullable = true)
       * |    |-- element: integer (containsNull = true)
       */

      // Show the data.
      usersDF.show()

      /**
       * +------+--------------+----------------+
       * |  name|favorite_color|favorite_numbers|
       * +------+--------------+----------------+
       * |Alyssa|          null|  [3, 9, 15, 20]|
       * |   Ben|           red|              []|
       * +------+--------------+----------------+
       */
    } finally {
      // Always release the Spark session, even if reading the file fails.
      spark.stop()
    }
  }

}
