package sparkSQL.study

import org.apache.spark.SparkConf
import org.apache.spark.sql.{Dataset, SparkSession}

object DF_DS {

  /**
   * Demo of converting between DataFrame and Dataset:
   * RDD -> DataFrame -> Dataset[UserDF_DS] -> DataFrame.
   */
  def main(args: Array[String]): Unit = {

    // Local-mode session so the demo runs without a cluster.
    val sparkConf = new SparkConf().setAppName("DataFrame_DateSet...").setMaster("local[*]")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()
    import spark.implicits._

    // Build a DataFrame from an in-memory RDD of (name, age) tuples.
    val pairsRdd = spark.sparkContext.makeRDD(List(("zahngsan", 18), ("lisi", 20)))
    val df = pairsRdd.toDF("name", "age")
    df.show()

    // DataFrame -> Dataset: attach the case-class schema via an implicit Encoder.
    val typed: Dataset[UserDF_DS] = df.as[UserDF_DS]
    typed.show()

    // Dataset -> DataFrame: drop back to untyped rows.
    val untyped = typed.toDF()
    untyped.show()

    spark.stop()

  }

  // Schema used for the typed Dataset conversion above.
  case class UserDF_DS(name: String, age: Int)

}
