

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demo: load a headerless CSV directory into a Spark DataFrame with inferred
 * column types, assign column names, and print the schema plus a sample of rows.
 */
object DataFrameDemo03 {
  // Case class describing a person row (id, name, age).
  // NOTE(review): currently unused in main — kept for RDD->DataFrame examples; remove if never needed.
  case class Person(xid: Int, xname: String, xage: Int)

  def main(args: Array[String]): Unit = {
    // Create the SparkSession (local mode, 2 threads).
    val spark = SparkSession.builder()
      .appName("sparksql-demo")
      .master("local[2]")
      .getOrCreate()

    try {
      // Obtain the SparkContext from the SparkSession to tune logging.
      val sc: SparkContext = spark.sparkContext
      sc.setLogLevel("WARN")

      // Read the CSV directory; no header row, so columns are named via toDF.
      val personDf: DataFrame = spark.read
        .option("header", "false")
        .option("inferSchema", "true") // auto-detect the data type of each column
        .csv("data/csv")
        .toDF("id", "name", "age")

      personDf.printSchema()
      // show() is side-effecting — keep the parentheses per Scala convention.
      personDf.show()
    } finally {
      // Always release the SparkSession (and its SparkContext) on exit.
      spark.stop()
    }
  }
}
