package com.cike.sparkstudy.sql.scala

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * 使用Scala开发Spark程序
  * 要实现反射的RDD到DataFrame的转换需要注意Object extends App的方式
  *
  */
/**
  * Student record used for reflection-based schema inference.
  *
  * NOTE(review): the case class must be declared at the TOP level, not inside
  * the App/main body — Spark's ScalaReflection cannot resolve a case class
  * that is local to a method (or to the App trait's delayed-init block), which
  * breaks the implicit RDD-to-DataFrame conversion via toDF().
  */
case class Student(id: Int, name: String, age: Int)

/**
  * Demonstrates the reflection-based RDD-to-DataFrame conversion in Scala:
  * reads a CSV-like text file into an RDD of case-class instances, converts
  * it with toDF(), queries it through a temp table, and shows three ways of
  * reading columns back out of the resulting Rows.
  *
  * Uses an explicit `main` instead of `extends App`: the App trait initializes
  * its fields through delayedInit, which can leave them null at the point
  * Spark serializes closures to executors.
  */
object RDD2DataFrameReflection {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("RDD2DataFrameReflection")

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // The reflection-based RDD-to-DataFrame conversion requires this
    // implicit import (provides the toDF() extension on RDDs).
    import sqlContext.implicits._

    try {
      // An ordinary RDD whose elements are case-class instances;
      // calling toDF() on it directly yields a DataFrame.
      val studentDF = sc.textFile("/developerCodes/test/students.txt", 1)
        .map { line => line.split(",") }
        .map { arr => Student(arr(0).trim.toInt, arr(1), arr(2).trim.toInt) }
        .toDF()

      // Register the DataFrame as a temp table and query it with SQL.
      studentDF.registerTempTable("students")
      val teenagerDF = sqlContext.sql("select * from students where age <=18")

      val teenagerRDD = teenagerDF.rdd

      // 1) Positional column access: row(i) returns Any, so convert via toString.
      teenagerRDD
        .map { row => Student(row(0).toString.toInt, row(1).toString, row(2).toString.toInt) }
        .collect()
        .foreach { stu => println(stu.id + ":" + stu.name + ":" + stu.age) }

      // 2) Named, typed column access via Row.getAs[T]("column").
      teenagerRDD
        .map { row => Student(row.getAs[Int]("id"), row.getAs[String]("name"), row.getAs[Int]("age")) }
        .collect()
        .foreach { stu => println(stu.id + ":" + stu.name + ":" + stu.age) }

      // 3) Fetch several named columns at once: getValuesMap returns a
      //    Map[columnName -> value] for the requested columns.
      teenagerRDD
        .map { row =>
          val map = row.getValuesMap[Any](Array("id", "name", "age"))
          Student(map("id").toString.toInt, map("name").toString, map("age").toString.toInt)
        }
        .collect()
        .foreach { stu => println(stu.id + ":" + stu.name + ":" + stu.age) }
    } finally {
      // Always release the SparkContext (the original never stopped it —
      // a resource leak that keeps the local cluster/UI alive).
      sc.stop()
    }
  }
}
