package com.bw.sparksql1.job1
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  *
  */
/**
  * Demo job: builds an RDD of [[Person]] from a CSV-like text file and
  * reads a JSON file into a DataFrame, printing rows three different ways
  * (by index, by typed index, by column name).
  */
object Job4 {
    // Silence Spark's verbose INFO logging before anything initializes.
    Logger.getLogger("org").setLevel(Level.ERROR)

    def main(args: Array[String]): Unit = {

      val conf = new SparkConf().setAppName("sparksql").setMaster("local")
      val spark = SparkSession.builder().config(conf).getOrCreate()
      try {
        // Each line of person4.txt is expected to be "name,age".
        val personRDD = spark.sparkContext
          .textFile("person4.txt")
          .map(line => line.split(","))
          .map(p => Person(p(0), p(1).trim.toLong))
        // Prints the RDD's toString (lineage info), not its contents.
        println(personRDD)
        personRDD.foreach(println(_))

        // Read JSON into a DataFrame; print each record — `row` is one Row.
        // NOTE(review): JSON-inferred schemas order columns alphabetically,
        // so index 0/1 here depend on person.json's field names — verify.
        val personDataFrame = spark.read.json("person.json")
        personDataFrame.foreach(row => println(row.get(0)))
        personDataFrame.foreach(row => println(row.getString(1)))
        personDataFrame.foreach(row => println(row.getAs[String]("name")))
      } finally {
        // Fix: the session/context was never stopped, leaking the local
        // SparkContext and its resources. Always stop on the way out.
        spark.stop()
      }
    }
}
case class Person(name:String,age:Long)
