package sparkSQL.study

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Demonstrates round-trip conversion between an RDD and a DataFrame.
 *
 * Flow: build a local SparkSession, lift a pair RDD into a DataFrame
 * via `toDF` (needs `sparkSession.implicits._` in scope), display it,
 * then drop back down to an `RDD[Row]` with `.rdd` and print each row.
 */
object DF_RDD {

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setAppName("DataFrame_RDD...")
      .setMaster("local[*]")

    val sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()

    // Required for the rdd.toDF(...) conversion below.
    import sparkSession.implicits._

    // RDD[(String, Int)] -> DataFrame with named columns.
    val pairRdd =
      sparkSession.sparkContext.makeRDD(List(("zahngsan", 18), ("lisi", 20)))
    val df = pairRdd.toDF("name", "age")
    df.show()

    // DataFrame -> RDD[Row]; collect to the driver and print each row.
    df.rdd.collect().foreach(println)

    sparkSession.stop()
  }

}
