import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.Encoders


object PeopleDF {

  /**
   * Entry point: runs a sequence of DataFrame API demos (schema, select,
   * groupBy, filter, sort, alias) over a JSON people file, then the
   * RDD-to-DataFrame / SQL demo in [[transDF]].
   *
   * @param args optional; args(0) may override the default JSON input path
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("SparkSQLDemo")
      .master("local")
      .config("loglevel", "ERROR")
      .config("spark.testing.memory", "2147480000")
      .getOrCreate()

    try {
      // Allow the input path to be supplied on the command line; the original
      // hard-coded path remains the default, so existing invocations still work.
      val jsonPath = args.headOption.getOrElse("src/people/people.json")

      val peopleDF = spark.read.json(jsonPath)
      peopleDF.printSchema()
      peopleDF.select("name").show()
      peopleDF.select(peopleDF("age")).show()
      peopleDF.select(peopleDF("name"), peopleDF("age") + 1).show()
      peopleDF.groupBy("age").count().show()
      peopleDF.filter(peopleDF("age") > 23).show()

      peopleDF.sort(peopleDF("age").asc, peopleDF("name").asc).show()
      peopleDF.select(peopleDF("name").as("username"), peopleDF("age")).show()

      transDF(spark)
    } finally {
      // BUG FIX: the session was never stopped before — the SparkContext
      // leaked on every run and on any exception thrown mid-demo.
      spark.stop()
    }
  }

  /**
   * Builds a DataFrame from a comma-separated text file via an RDD of
   * [[People]], registers it as a temp view, and runs a SQL query over it.
   *
   * @param spark the active SparkSession
   */
  def transDF(spark: SparkSession): Unit = {
    import spark.implicits._
    val peopleRDD = spark.sparkContext.textFile("src/people/people.txt")
      .map(_.split(","))
      // Trim BOTH fields for consistency: lines look like "Michael, 29",
      // and the original trimmed only the age, leaving whitespace possible
      // in names split from non-leading positions.
      .map(attr => People(attr(0).trim, attr(1).trim.toInt))
    val peopleDF = peopleRDD.toDF()

    peopleDF.createOrReplaceTempView("people") // register temp view "people"
    val peosDFs = spark.sql("select u_name, u_age from people where u_age > 23")
    peosDFs.map(t => ("Name:" + t(0), "AGE:" + t(1))).show()
  }

  /** Row type for people.txt records: a name and an age. */
  final case class People(u_name: String, u_age: Int)
}
