import org.apache.spark.sql.SparkSession

object ParquetReader {

  /**
   * Entry point: reads the area-dictionary Parquet dataset from
   * `dw_etl/data/area_dict` and prints the first 10 rows untruncated.
   *
   * Runs Spark in `local` mode; intended as a quick inspection utility.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("地理位置字典geohash编码转化").master("local").getOrCreate()
    try {
      val df = spark.read.parquet("dw_etl/data/area_dict")
      // truncate = false so long dictionary/geohash strings are printed in full.
      df.show(10, truncate = false)
    } finally {
      // Release the session (and its local SparkContext) even if the read/show fails;
      // the original leaked the session on any exception before close().
      spark.close()
    }
  }

}
