package com.bw.sparksql1.job1
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Minimal Spark SQL demo: builds typed Datasets from a case class, from
  * primitives, and from a JSON file, then runs a SQL count over a temp view.
  *
  * Runs with a local master; expects `person.json` in the working directory.
  */
object Job5 {
    def main(args: Array[String]): Unit = {
      // Silence Spark's verbose INFO logging; surface errors only.
      Logger.getLogger("org").setLevel(Level.ERROR)
      val spark = SparkSession
        .builder()
        .master("local")
        .appName(this.getClass.getSimpleName)
        .getOrCreate()

      try {
        import spark.implicits._

        // Typed Dataset from a case class.
        val caseClassDS = Seq(Person("Andy", 32)).toDS()
        caseClassDS.show()

        // Typed Dataset from primitives; map demonstrates a typed transform.
        val primitiveDS = Seq(1, 2, 3).toDS()
        primitiveDS.map(_ + 1).collect()

        // NOTE(review): assumes person.json holds one JSON object per line
        // whose fields match Person's schema — confirm the fixture exists.
        val personDS = spark.read.json("person.json").as[Person]
        // BUG FIX: the original `println(personDS)` only printed the Dataset's
        // toString (a schema summary such as "[name: string, age: bigint]"),
        // not its rows. show() renders the actual contents, consistent with
        // the other output calls in this job.
        personDS.show()
        personDS.printSchema()
        personDS.createOrReplaceTempView("person")
        spark.sql("select count(*) from person").show()
      } finally {
        // BUG FIX: the original never stopped the session, leaking the
        // underlying SparkContext and its local resources.
        spark.stop()
      }
    }
}
case class Person(name: String, age: Long)