package com.bw.sparksql1.job3

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
  * Spark SQL job: loads a JSON file into a typed `Dataset[Person5]`,
  * registers it as a temp view, and prints the row count.
  *
  * Usage: the first program argument, if given, is the input path;
  * otherwise defaults to "person.json" (previous hard-coded behavior).
  */
object Job5 {
    def main(args: Array[String]): Unit = {
      // Silence Spark's verbose INFO/WARN logging so only the result shows.
      Logger.getLogger("org").setLevel(Level.ERROR)
      val conf = new SparkConf().setAppName("sparksql").setMaster("local")
      val spark = SparkSession.builder().config(conf).getOrCreate()
      try {
        import spark.implicits._
        // Optional CLI arg generalizes the previously hard-coded path.
        val inputPath = args.headOption.getOrElse("person.json")
        // `as[Person5]` requires the JSON schema to provide `name` and `age`.
        val personDS = spark.read.json(inputPath).as[Person5]
        personDS.createOrReplaceTempView("person")
        spark.sql("select count(*) from person").show()
      } finally {
        // Always release the SparkContext and its local resources,
        // even if reading/parsing fails (was missing before).
        spark.stop()
      }
    }
}
case class Person5(name: String, age: Long)