package SparkSQL2

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

object SparkSQL2Test {
  // NOTE(review): immutable fields (val) would be preferable for a case class,
  // but `var` is kept so the external interface (generated setters) is unchanged.
  case class Person(var name: String, var age: Int)

  /**
   * Demonstrates three ways of querying the same data with Spark:
   * a plain RDD filter, a DataFrame temp view via SQL, and a typed Dataset.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local")
    val spark = SparkSession.builder()
      .appName(this.getClass.getSimpleName) // interpolation was redundant here
      .config(conf)
      .getOrCreate()

    try {
      //val df: DataFrame = spark.read.json("/Users/hongyi/IdeaProjects/SparkSQLTest/src/main/scala/SparkSQL2/person.json")
      //df.show()

      // Load the raw text file as an RDD of "name,age" lines.
      val fileRdd: RDD[String] = spark.sparkContext.textFile(
        "/Users/hongyi/IdeaProjects/SparkSQLTest/src/main/scala/SparkSQL2/people.txt")

      // Parse each line into a Person. `.trim` tolerates whitespace after the
      // comma (the canonical Spark people.txt uses lines like "Michael, 29",
      // which would otherwise make `toInt` throw NumberFormatException).
      val peopleRdd: RDD[Person] =
        fileRdd.map(_.split(",")).map(p => Person(p(0), p(1).trim.toInt))

      // Object-oriented style: filter directly on the Person fields.
      // (Replaces the redundant `if (cond) true else false` form.)
      peopleRdd.filter(_.age > 20).foreach(println)

      import spark.implicits._
      val peopleDF: DataFrame = peopleRdd.toDF()
      peopleDF.createOrReplaceTempView("people")
      import spark.sql
      sql("select * from people").show()

      // Typed Dataset view over the same data; SQL and lambda filters agree.
      val peopleDs: Dataset[Person] = peopleDF.as[Person]
      peopleDs.createOrReplaceTempView("p")
      sql("select * from p").show()
      peopleDs.filter(_.age > 20).show()
    } finally {
      // Always release the local Spark context, even if a stage above fails.
      spark.stop()
    }
  }
}
