import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types._
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.Encoders

object RDDschemaDF {

  /** Example: build a DataFrame from a text-file RDD using a programmatically
    * constructed schema (StructType), register it as a temp view, and query it
    * with Spark SQL.
    *
    * Expects `src/people/people.txt` with lines of the form `name, age`.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("RDD2DFBySchema")
      .master("local")
      .config("spark.testing.memory", "2147480000")
      .getOrCreate()

    // Stop the session even if the job fails, so local Spark resources
    // (UI port, executor threads) are released.
    try {
      val peopleRDD = spark.sparkContext.textFile("src/people/people.txt")

      // Column names; could be passed in as a program argument instead of
      // being hard-coded. All columns are typed as StringType here.
      val schemeString = "name age"
      val schemeField = schemeString.split(" ").map(fieldName => StructField(fieldName, StringType, nullable = true))
      val schemaType = StructType(schemeField)

      // Each input line is "name,age"; trim the age token to drop any
      // whitespace after the comma.
      val rowRDD = peopleRDD.map(_.split(",")).map(attr => Row(attr(0), attr(1).trim()))

      // Needed for the implicit Encoder used by `results.map(...)` below.
      import spark.implicits._
      val peopleDF = spark.createDataFrame(rowRDD, schemaType)

      peopleDF.createOrReplaceTempView("people")

      val results = spark.sql("select name,age from people")
      results.map(p => ("name:" + p(0), "age:" + p(1))).show()
    } finally {
      spark.stop()
    }
  }

}
