package com.hdaccp.beike
import org.apache.spark.sql.SparkSession
/**
  * Demonstrates interoperation between DataFrame and RDD:
  * builds a DataFrame from a text-file RDD via a case class
  * (reflection-based schema inference).
  */
object Demo4 {

  /**
    * Entry point. Reads whitespace-separated "name count" lines from a text
    * file, converts the RDD to a DataFrame through the [[People]] case class,
    * and prints the result.
    *
    * @param args optional: args(0) overrides the input file path
    *             (defaults to "f:/resources/people.txt" for backward
    *             compatibility with the original demo)
    */
  def main(args: Array[String]): Unit = {
    // Allow the input path to be passed on the command line instead of
    // being hard-coded; default preserves the original behavior.
    val inputPath = args.headOption.getOrElse("f:/resources/people.txt")

    // Local SparkSession with 2 worker threads.
    val spark = SparkSession.builder()
      .appName("beikeDemo3App")
      .master("local[2]")
      .getOrCreate()

    // Required for .toDF() on the RDD and for the String encoder used by
    // DataFrame.map below.
    import spark.implicits._

    val lines = spark.sparkContext.textFile(inputPath)

    // Parse each line as "name count". Malformed lines (wrong field count,
    // non-numeric count) are skipped instead of crashing the job with
    // ArrayIndexOutOfBoundsException / NumberFormatException.
    // Note: y(1) is already a String, so the original `.toString` was redundant.
    val dataFrame = lines
      .map(_.split(" "))
      .collect { case Array(name, count) if count.matches("-?\\d+") =>
        People(name, count.toInt)
      }
      .toDF()

    dataFrame.show()
    println("---------------------------")
    // DataFrame.map produces a Dataset[String] here via the implicit encoder.
    dataFrame.map(x => "name is :" + x(0)).show()

    spark.stop()
  }

  /** Schema carrier for reflection-based DataFrame creation: one person per row. */
  case class People(name: String, count: Int)
}
