package cn.spark.study.sql

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

/**
 * Example: converting an RDD to a DataFrame via case-class reflection
 * (Spark 1.x `SQLContext` API), then reading `Row` values back three ways:
 * by positional index, by field name with `getAs`, and via `getValuesMap`.
 */
object RDD2DataFrameReflaction {

  // The case class must live at a scope where Spark's Scala reflection can
  // resolve its TypeTag. Declaring it inside the running method body can
  // fail with "No TypeTag available" when calling toDF().
  case class Student(id: Int, name: String, age: Int)

  // Spark's documentation advises defining main() rather than extending
  // scala.App: App initializes fields lazily, which may not work correctly
  // with Spark.
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("RDD2DataFrameReflaction")

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Implicit conversions (rddToDataFrameHolder etc.) required for .toDF()
    // on an RDD of case-class instances.
    import sqlContext.implicits._

    try {
      // NOTE(review): the file is named .json but parsed as comma-separated
      // lines ("id,name,age") — confirm the actual file format.
      sc.textFile("C://Users//e20160504//Desktop//spark//students.json", 1)
        .map(_.split(","))
        .map(arr => Student(arr(0).trim.toInt, arr(1), arr(2).trim.toInt))
        .toDF()
        .registerTempTable("student")

      val teenagerStudentDF = sqlContext.sql("select * from student where age <= 18")

      // Way 1: access Row fields by positional index — row(i) returns Any,
      // so values are round-tripped through toString here.
      teenagerStudentDF
        .map(row => Student(row(0).toString.toInt, row(1).toString, row(2).toString.toInt))
        .foreach(println(_))

      // Way 2: access Row fields by column name with a typed getAs.
      teenagerStudentDF
        .map(row => Student(row.getAs[Int]("id"), row.getAs[String]("name"), row.getAs[Int]("age")))
        .foreach(println(_))

      // Way 3: extract several fields at once as a Map. The type parameter
      // is given explicitly as Any (omitting it lets T infer as Nothing,
      // which only compiles by accident), so the casts are visible.
      teenagerStudentDF
        .map { row =>
          val valueMap = row.getValuesMap[Any](Seq("id", "name", "age"))
          Student(
            valueMap("id").asInstanceOf[Int],
            valueMap("name").asInstanceOf[String],
            valueMap("age").asInstanceOf[Int])
        }
        .foreach(println(_))
    } finally {
      // Release Spark resources even if the job fails.
      sc.stop()
    }
  }
}