package cn.doitedu.test

import org.apache.spark.sql.SparkSession

object ParquetReader {

  /**
   * Reads a parquet dataset and prints up to 100 rows to stdout.
   *
   * Usage: `ParquetReader [path]`
   *
   * @param args optional; `args(0)` overrides the default input path
   *             ("dataware/data/geodict"), keeping the original behavior
   *             when no argument is given.
   */
  def main(args: Array[String]): Unit = {

    // Impersonate "root" so local-mode Spark can access HDFS paths
    // owned by root (HADOOP_USER_NAME is what the Hadoop client reads).
    System.setProperty("user.name", "root")
    System.setProperty("HADOOP_USER_NAME", "root")

    // Allow the caller to supply the parquet location on the command line;
    // fall back to the original hard-coded path for backward compatibility.
    val inputPath = args.headOption.getOrElse("dataware/data/geodict")

    val spark = SparkSession.builder()
      // Tiny local dataset — 2 shuffle partitions avoids the 200 default.
      .config("spark.sql.shuffle.partitions", "2")
      .appName("地理位置知识库加工")
      .master("local")
      .getOrCreate()

    // try/finally guarantees the session is released even when the read
    // or the show fails (the original leaked the session on any exception).
    try {
      val df = spark.read.parquet(inputPath)
      // Print up to 100 rows without truncating wide column values.
      df.show(100, truncate = false)
    } finally {
      spark.close()
    }
  }

}
