package chapter04

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

object Test03_RDDToDF {

  /**
   * Demonstrates the conversions between the three Spark abstractions:
   * RDD -> DataFrame, DataFrame -> RDD, RDD -> Dataset, Dataset -> RDD.
   * Reads sample data from `input/score.json` and prints the results.
   */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("RDDToDF")
      .getOrCreate()
    // Required for the toDF / toDS implicit conversions below.
    import spark.implicits._
    try {
      val sc = spark.sparkContext

      // RDD -> DataFrame: a tuple RDD gains toDF via spark.implicits.
      val peopleRdd = sc.makeRDD(List(("张三", 20),
        ("李四", 30), ("王五", 25)))
      val df = peopleRdd.toDF("name", "age")
      df.show()

      // DataFrame -> RDD[Row].
      val df1 = spark.read.json("input/score.json")
      val rowRdd = df1.rdd
      // Accessing Row fields: by position or by column name.
      // NOTE: Spark's JSON reader infers numeric columns as LongType, so
      // getAs[Long] must be used here — getAs[Int] throws
      // ClassCastException at runtime.
      // assumes column 1 of score.json is numeric — TODO confirm schema
      val byIndex = rowRdd.map(e => (e.getAs[String](0),
        e.getAs[Long](1), e.getAs[String](2)))
      val byName = rowRdd.map(e => (e.getAs[String]("name"),
        e.getAs[String]("grade")))
      println(byIndex.collect().toList)
      println(byName.collect().toList)
      println(rowRdd.collect().toList)

      // RDD -> Dataset: a Dataset is a typed DataFrame; each row is a
      // User object rather than an untyped Row.
      val userDs = peopleRdd.map(e => User(e._1, e._2)).toDS()
      userDs.show()

      // Dataset -> RDD: yields an RDD of the case-class objects.
      val rdd1 = userDs.rdd
      println(rdd1.map(e => (e.name, e.age)).collect().toList)
    } finally {
      // Always release the SparkSession (and its SparkContext),
      // even if the job above fails.
      spark.stop()
    }
  }
}
/** Typed record backing the Dataset example: a person's name and age. */
final case class User(name: String, age: Int)