package com.bw.sparksql1.job3

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/**
  * Demonstrates the difference between RDD and DataFrame programming styles:
  * an RDD carries typed objects ([[Person]]), while a DataFrame exposes each
  * record as a generic `Row` accessed by position or column name.
  */
object Job4 {
    def main(args: Array[String]): Unit = {
      // Silence Spark's verbose INFO logging so the demo output is readable.
      Logger.getLogger("org").setLevel(Level.ERROR)
      val spark = SparkSession
        .builder()
        .master("local")
        .appName("Spark SQL basic example")
        .getOrCreate()

      try {
        // RDD style: parse each CSV line ("name,age") into a typed Person.
        // NOTE(review): assumes every line has at least two comma-separated
        // fields and the second parses as Long — malformed input will throw.
        val personRDD = spark.sparkContext
          .textFile("person4.txt")
          .map(_.split(","))
          .map(p => Person(p(0), p(1).toLong))
//      personRDD.foreach(println(_))
//      personRDD.map(p => p.name)

        // DataFrame style: every record is a generic Row object; fields are
        // accessed positionally or by column name rather than as typed members.
        val personDF = spark.read.json("person.json")
        personDF.foreach(row => println(row.get(0)))                 // column 0: age (JSON schema columns are ordered alphabetically)
        personDF.foreach(row => println(row.getString(1)))           // column 1: name
        personDF.foreach(row => println(row.getAs[String]("name")))  // lookup by column name
      } finally {
        // Always release the SparkContext, even if the job fails —
        // the original version leaked the session on every run.
        spark.stop()
      }
    }
}
case class Person(name:String,age:Long)

