import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.Encoder
import org.apache.spark.sql.SparkSession

/**
 * Reads "id,name,age" CSV lines from employee.txt, converts the RDD to a
 * DataFrame via implicit Encoders, and queries it with Spark SQL.
 */
object RDDtoDF {

  // The case class must live at top level, NOT inside main: Spark's implicit
  // Encoder derivation (spark.implicits._) cannot synthesize an encoder for a
  // case class declared inside a method, so `.toDF()` would fail to compile.
  case class Employee(id: Long, name: String, age: Long)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("Simple Application")
      .getOrCreate()

    // Must be imported AFTER `spark` exists. A file-level
    // `import spark.implicits._` does not compile because `spark` is a local
    // value of this method, not a stable top-level identifier.
    import spark.implicits._

    // Parse each CSV line into an Employee and register the result as a view.
    // NOTE(review): assumes every line has 3 well-formed fields; a malformed
    // line would throw on toInt or index out of bounds — confirm input format.
    val employeeDF = spark.sparkContext
      .textFile("file:///usr/local/spark/employee.txt")
      .map(_.split(","))
      .map(attributes => Employee(attributes(0).trim.toInt, attributes(1), attributes(2).trim.toInt))
      .toDF()
    employeeDF.createOrReplaceTempView("employee")

    // spark.sql returns a DataFrame, not an RDD — name it accordingly.
    val resultDF = spark.sql("select id,name,age from employee")
    resultDF.map(t => "id:" + t(0) + "," + "name:" + t(1) + "," + "age:" + t(2)).show()

    spark.stop() // release the local Spark context before the JVM exits
  }
}
// sbt build definition (simple.sbt)
name := "Simple Project"
version := "1.0"
scalaVersion := "2.12.15"
// Use %% so sbt appends the Scala binary suffix (_2.12) to the artifact name.
// The original single `%` with "spark-core" asks for an artifact with no
// suffix, which does not exist on Maven Central and fails to resolve.
libraryDependencies += "org.apache.spark" %% "spark-core" % "3.2.0"
libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.2.0"
# Package the project into target/scala-2.12/simple-project_2.12-1.0.jar.
/usr/local/sbt/sbt package
# Submit the job. The --class value must match the object name exactly:
# the original " RDDtoDF " (with padding spaces inside the quotes) makes
# spark-submit look for a class literally named ' RDDtoDF ' and fail with
# ClassNotFoundException.
/usr/local/spark/bin/spark-submit --class "RDDtoDF" /usr/local/spark/mycode/rddtodf/target/scala-2.12/simple-project_2.12-1.0.jar