package com.wudl.sparksql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SparkSession}

/**
 * Convert a DataFrame to an RDD.
 */

/** Immutable record for one person parsed from the JSON source.
  *
  * @param name person's name (JSON `name` field)
  * @param age  person's age (JSON `age` field, narrowed from Long to Int)
  */
final case class User(name: String, age: Int)

/** Entry point: reads a JSON file into a DataFrame and converts it to an RDD[User].
  *
  * Usage: an optional first argument overrides the input path; otherwise the
  * original hard-coded sample path is used.
  */
object DfToRdd {
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder().appName("dfToRdd").master("local[1]").getOrCreate()
    // Needed only by the commented-out toDF experiment below; harmless to keep.
    import spark.implicits._
    // Convert a plain Scala collection directly to a DataFrame:
    //    val df = (1 to 10).toDF("number")
    //    df.show()
    //    // DataFrame -> RDD[Row]
    //    val rdd:RDD[Row] = df.rdd
    //    rdd.collect().foreach(println)

    // Allow the input path to be passed on the command line; fall back to the
    // original sample file so existing invocations keep working.
    val inputPath = args.headOption.getOrElse(
      "F:\\ideaWorkSpace2020\\learning\\SparkStudy\\SparkSql\\src\\main\\resources\\people.json")

    // Convert the DataFrame to a typed RDD.
    val df = spark.read.json(inputPath)
    df.printSchema()

    // Access fields by NAME rather than position: Spark infers a JSON schema
    // with columns in alphabetical order, so positional getString(1)/getLong(0)
    // silently breaks if the schema changes. getAs is robust to column order.
    val rdd01: RDD[User] = df.rdd.map { row =>
      User(row.getAs[String]("name"), row.getAs[Long]("age").toInt)
    }
    rdd01.collect().foreach(println)

    spark.stop()
  }

}
