
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

object DataFrameDemo02 {

  /**
   * Row schema for the text file: one person per line as
   * "id name age" separated by whitespace. Spark derives the
   * DataFrame schema from this case class via `toDF()`.
   */
  final case class Person(xid: Int, xname: String, xage: Int)

  def main(args: Array[String]): Unit = {
    // Build (or reuse) a local SparkSession with 2 worker threads.
    val spark = SparkSession.builder()
      .appName("sparksql-demo")
      .master("local[2]")
      .getOrCreate()

    // Ensure the session is always stopped, releasing executor
    // threads and the Spark UI port even if the job fails.
    try {
      // Obtain the SparkContext from the session for RDD operations.
      val sc: SparkContext = spark.sparkContext
      sc.setLogLevel("WARN")

      val personRdd: RDD[String] = sc.textFile("data/person.txt")

      // Required for the rdd.toDF() implicit conversion below.
      import spark.implicits._

      // Parse each whitespace-delimited line into a Person.
      // Blank/whitespace-only lines are skipped so they don't crash
      // the parse with ArrayIndexOutOfBounds/NumberFormatException.
      val personRdd1: RDD[Person] = personRdd
        .map(_.trim)
        .filter(_.nonEmpty)
        .map { line =>
          val fields = line.split("\\s+")
          Person(fields(0).toInt, fields(1), fields(2).toInt)
        }

      // RDD[Person] -> DataFrame: column names/types come from Person.
      val personDf: DataFrame = personRdd1.toDF()
      personDf.printSchema()
      personDf.show()
    } finally {
      spark.stop()
    }
  }

}
