package com.li.sparksql.rdd2dataframe

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object Rdd2DataFrameByReflect {

  /**
   * Demonstrates the reflection-based RDD-to-DataFrame conversion:
   * build an RDD of tuples, map it to a case class so Spark can infer
   * the schema, query it through Spark SQL, then convert the result
   * back to an `RDD[Row]` and read the columns both by position and by name.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    val session = SparkSession.builder()
      .appName("Rdd2DataFrameByReflect")
      .config(conf)
      .getOrCreate()
    val context = session.sparkContext

    val dataRDD = context.parallelize(Array(("mason1", 19), ("mason2", 29), ("mason3", 39), ("mason4", 49)))

    // The implicits from the session are required for toDF(); without this
    // import the RDD-to-DataFrame conversion does not compile.
    import session.implicits._
    val df = dataRDD.map(stu => Student(stu._1, stu._2)).toDF()

    // Operate on the RDD data through the DataFrame / SQL API.
    df.createOrReplaceTempView("student")
    val resDf = session.sql("select * from student") // never reassigned, so val

    // Convert the DataFrame back to an RDD of Rows.
    val resRDD = resDf.rdd

    // Access columns by position — fragile, depends on the column order
    // produced by the query.
    resRDD.map(row => Student(row(0).toString, row(1).toString.toInt))
      .collect().foreach(println(_))

    // Access columns by name — preferred, robust to column reordering.
    resRDD.map(row => Student(row.getAs[String]("name"), row.getAs[Int]("age")))
      .collect().foreach(println(_))

    // session.stop() also stops the underlying SparkContext, so a separate
    // context.stop() beforehand is redundant.
    session.stop()
  }
}

/**
 * Immutable record used for reflection-based schema inference: Spark derives
 * the DataFrame columns (`name: String`, `age: Int`) from these fields.
 * Marked `final` — case classes should not be extended.
 */
final case class Student(name: String, age: Int)
