package chapter04

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{Row, SparkSession}

/**
 * Demonstrates conversions between untyped DataFrames and typed Datasets:
 * RDD -> DataFrame -> Dataset[Person] -> DataFrame, plus collect() on each.
 */
object Test06_DfToDs {
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("dfToDs")
      .getOrCreate()
    import spark.implicits._
    try {
      // Build a DataFrame from an RDD of (name, age) tuples.
      val sc = spark.sparkContext
      val value = sc.makeRDD(List(("张三", 20),
        ("李四", 25), ("王五", 23)))
      val frame = value.toDF("name", "age")
      // DataFrame -> Dataset[Person]: column names and types must match the case class.
      val ds = frame.as[Person]
      ds.printSchema()
      ds.show()
      // Dataset -> DataFrame: drops the typed view, keeps the data.
      val frame1 = ds.toDF()
      frame1.show()
      // collect() on a DataFrame yields untyped Rows; on a Dataset, typed Persons.
      val rows: Array[Row] = frame1.collect()
      val persons: Array[Person] = ds.collect()
      println(rows.mkString(", "))
      println(persons.mkString(", "))
    } finally {
      // Always release the local Spark context, even if the demo fails.
      spark.stop()
    }
  }
}
case class Person(name:String,age:Int)
