package com.doit.sparksql.day01

import org.apache.spark.sql.DataFrame


/**
 * Reads the sample parquet and orc datasets and prints their schemas and rows.
 *
 * @author MDK
 * @version 2021.2.2
 * @since 2022-01-13 14:40
 */
object SQL_Parquet02 {

  /**
   * Entry point: loads the parquet and orc sample datasets, prints each
   * schema followed by up to 100 untruncated rows, then closes the session.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val spark = SQLUtil.getSession

    // Columnar-format readers: Spark infers the schema from the files.
    val parquetDf: DataFrame = spark.read.parquet("sql_data/parquet/")
    val orcDf: DataFrame = spark.read.orc("sql_data/orc/")

    parquetDf.printSchema()
    // truncate = false keeps wide column values fully visible in the output
    parquetDf.show(100, truncate = false)

    println("----------------------------------------------")

    orcDf.printSchema()
    orcDf.show(100, truncate = false)

    spark.close()
  }
}
