package com.hdaccp.ch04

import org.apache.spark.sql.SparkSession

object Demo3 {
  def main(args: Array[String]): Unit = {
    // Local import keeps the file's top-of-file import block untouched.
    import scala.util.Try

    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("ch04Demo3App")
      .getOrCreate()

    import spark.implicits._

    // 1. Read a whitespace-delimited "name count" text file.
    //    The path may be supplied as the first CLI argument; falls back to
    //    the original hard-coded location for backward compatibility.
    val inputPath = args.headOption.getOrElse("f:/resources/people.txt")
    val rdd = spark.sparkContext.textFile(inputPath)

    // 2. Convert the RDD to a DataFrame.
    //    - split("\\s+") tolerates runs of spaces (plain " " yields empty tokens)
    //    - malformed lines (blank, missing fields, non-numeric count) are
    //      skipped via flatMap/Option instead of crashing the job with
    //      ArrayIndexOutOfBoundsException / NumberFormatException
    //    - the redundant `.toString` on an already-String field is removed
    val dataFrame = rdd
      .map(_.trim)
      .filter(_.nonEmpty)
      .map(_.split("\\s+"))
      .flatMap { fields =>
        for {
          name     <- fields.headOption
          rawCount <- fields.lift(1)
          count    <- Try(rawCount.toInt).toOption
        } yield People(name, count)
      }
      .toDF()

    dataFrame.show()
    spark.stop()
  }

  // Row schema for the input file: a person's name and an integer count.
  // Kept at object level (not inside main) so Spark's implicit Encoder
  // derivation via spark.implicits._ can resolve it.
  final case class People(name: String, count: Int)
}
