package sparkSQL.study

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/** Demo: round-tripping between a plain RDD and a typed Dataset. */
object RDD_DS {

  def main(args: Array[String]): Unit = {

    // Local-mode session; all cores of the current machine.
    val sparkConf = new SparkConf().setAppName("DataSet_RDD...").setMaster("local[*]")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()
    import spark.implicits._

    // --- RDD <==> DataSet round trip ---

    // Start from a raw RDD of (name, age) tuples.
    val sourceRdd = spark.sparkContext.makeRDD(List(("zahngsan", 18), ("lisi", 20)))

    // RDD -> Dataset: lift each tuple into the case class so the
    // implicit encoder (from spark.implicits) can build a typed Dataset.
    val userDs = sourceRdd
      .map { pair => UserRDD_DS(pair._1, pair._2) }
      .toDS()
    userDs.show()

    // Dataset -> RDD: `.rdd` recovers the underlying RDD of case-class rows.
    val backToRdd = userDs.rdd
    backToRdd.collect().foreach(println)

    spark.stop()

  }
  // Row type backing the Dataset encoder.
  case class UserRDD_DS(name: String, age: Int)
}
