package ScalaSpark

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

object RDD2DataFrameReflection {

  // Case class used for reflection-based schema inference. It must live at a
  // stable object scope (not inside main) so Spark's reflection can see it.
  case class Student(id: Int, name: String, age: Int)

  /**
   * Reads a CSV file of "id,name,age" lines, converts the resulting RDD to a
   * DataFrame via case-class reflection, and registers it as a temp table.
   *
   * A plain `main` is used instead of `extends App`: the Spark documentation
   * warns that applications extending `scala.App` may not work correctly
   * because of its delayed-initialization semantics.
   *
   * @param args optional; args(0) supplies the input path
   *             (defaults to "students.txt" instead of the previous hard-coded "")
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("RDD2DataFrameReflection")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Reflection-based RDD -> DataFrame conversion requires this implicit
    // import (this was the missing piece that left the original code commented out).
    import sqlContext.implicits._

    val inputPath = if (args.nonEmpty) args(0) else "students.txt"

    try {
      val studentDF = sc
        .textFile(inputPath, 1)
        .map(_.split(","))
        .map(arr => Student(arr(0).trim.toInt, arr(1), arr(2).trim.toInt))
        .toDF()

      // The original truncated call was "studentDF.regis..." — presumably
      // registerTempTable; verify against the intended SQL usage.
      studentDF.registerTempTable("students")
      studentDF.show()
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
