import org.apache.spark.sql.{DataFrame, SparkSession}

object test {

  /**
    * Entry point: builds a local SparkSession, reads a JSON file into a
    * DataFrame, and prints its contents to stdout.
    *
    * @param args optional; if non-empty, `args(0)` is used as the input JSON
    *             path instead of the hard-coded default below.
    */
  def main(args: Array[String]): Unit = {
    // Default kept for backward compatibility; callers may override via args(0).
    val inputPath = args.headOption.getOrElse(
      "C:\\Users\\Raichard\\Desktop\\宜春\\data1\\ZZ_VW_HOUSEHOLDSTAFFS\\20200917\\ZZ_VW_HOUSEHOLDSTAFFS_1.json"
    )

    val sparkSession = SparkSession
      .builder()
      .config("spark.network.timeout", "1200")
      .config("spark.kryoserializer.buffer.max", "2000")
      .config("spark.executor.memory", "6g")
      .master("local[*]")
      .appName("SQLContextApp")
      .getOrCreate()

    // Ensure the session (and its underlying SparkContext) is always released,
    // even if the read or show throws.
    try {
      val frame: DataFrame = sparkSession
        .read
        .json(inputPath)

      frame.show()
    } finally {
      sparkSession.stop()
    }
  }
}
