// 源文件内容如下（包含id,name,age），将数据复制保存到ubuntu系统/usr/local/spark下，命名为employee.txt，实现从RDD转换得到DataFrame，并按id:1,name:Ella,age:36的格式打印出DataFrame的所有数据。请写出程序代码。（任选一种方法即可）
// 假设当前目录为/usr/local/spark/mycode/rddtodf，在当前目录下新建一个目录mkdir -p src/main/scala，然后在目录/usr/local/spark/mycode/rddtodf/src/main/scala下新建一个rddtodf.scala，复制下面代码；（下列两种方式任选其一）
// 方法一：利用反射来推断包含特定类型对象的RDD的schema，适用对已知数据结构的RDD转换；
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.Encoder
import org.apache.spark.sql.SparkSession
import spark.implicits._
object RDDtoDF {
    def main(args: Array[String]) {
case class Employee(id:Long,name: String, age: Long)
val spark = SparkSession
    .builder
    .master("local[*]")
    .appName("Simple Application")
    .getOrCreate()
val employeeDF = spark.sparkContext.textFile("file:///usr/local/spark/employee.txt").map(_.split(",")).map(attributes => Employee(attributes(0).trim.toInt,attributes(1), attributes(2).trim.toInt)).toDF()
employeeDF.createOrReplaceTempView("employee")
val employeeRDD = spark.sql("select id,name,age from employee")
employeeRDD.map(t => "id:"+t(0)+","+"name:"+t(1)+","+"age:"+t(2)).show()
    }
}
// 方法二：使用编程接口，构造一个schema并将其应用在已知的RDD上。
import org.apache.spark.sql.types._import org.apache.spark.sql.Encoder
import org.apache.spark.sql.Row
import org.apache.spark.sql.SparkSession 
object RDDtoDF {
def main(args: Array[String]) {
val spark = SparkSession
    .builder
    .master("local[*]")
    .appName("Simple Application")
    .getOrCreate()
val employeeRDD = spark.sparkContext.textFile("file:///usr/local/spark/employee.txt")
val schemaString = "id name age"
val fields = schemaString.split(" ").map(fieldName => StructField(fieldName, StringType, nullable = true))
val schema = StructType(fields)
val rowRDD = employeeRDD.map(_.split(",")).map(attributes => Row(attributes(0).trim, attributes(1), attributes(2).trim))
val employeeDF = spark.createDataFrame(rowRDD, schema)
employeeDF.createOrReplaceTempView("employee")
val results = spark.sql("SELECT id,name,age FROM employee")
results.map(t => "id:"+t(0)+","+"name:"+t(1)+","+"age:"+t(2)).show()
    }
}