package com.offcn.bigdata.sql.p1

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
  * Demonstrates conversions between Spark's three programming models:
  * RDD, DataFrame, and Dataset.
  */
object _03ProgramModelConversionOps {
    def main(args: Array[String]): Unit = {
        // Local session used only for this demo; stopped at the end.
        val spark = SparkSession.builder()
            .appName("_03ProgramModelConversionOps")
            .master("local[*]")
            .getOrCreate()

        // Sample data; People is a case class declared elsewhere in this package
        // (name, age, height) — a case class is required for toDS/toDF encoders.
        val persons = List(
            People("张皓", 15, 169.5),
            People("冯凡", 25, 179.5),
            People("单松", 12, 109.5),
            People("林博", 25, 139.5)
        )
        // Brings the toDF/toDS implicit conversions and encoders into scope.
        import spark.implicits._
        val personRDD: RDD[People] = spark.sparkContext.parallelize(persons)

        // RDD -> DataFrame (column names inferred from the case class fields).
        val pdf = personRDD.toDF()
        pdf.show()
        // RDD -> Dataset[People] (keeps the typed API).
        val ds = personRDD.toDS()
        ds.show()

        println("----------df------------")
        /*
            Why a DataFrame cannot be converted directly to a Dataset:
            each DataFrame record is a generic Row object, not a case class,
            so there is no encoder — use pdf.as[People] to recover the type.
         */
        // DataFrame -> RDD[Row]. The original discarded this expression,
        // so the conversion was never actually demonstrated; keep the result
        // and use it so the round trip is visible.
        val rowRDD = pdf.rdd
        println(s"df -> rdd, count = ${rowRDD.count()}")

        println("----------ds------------")
        // Dataset[People] -> RDD[People] (typed, unlike the DataFrame case).
        val peopleRDD = ds.rdd
        println(s"ds -> rdd, count = ${peopleRDD.count()}")
        // Dataset -> DataFrame while renaming the columns.
        ds.toDF("n", "a", "h").show()

        spark.stop()
    }
}
