package day11

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

object InferringSchema {

  /**
   * Spark job that infers a DataFrame schema via reflection on the [[Person]]
   * case class, selects the two oldest persons, and writes them out as JSON.
   *
   * @param args args(0) = input path of a CSV file with lines "id,name,age,faceVal";
   *             args(1) = output directory for the JSON result.
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "usage: InferringSchema <inputPath> <outputPath>")

    // Boilerplate: SparkSession is the unified Spark 2.x entry point. It replaces
    // the deprecated SQLContext (SparkSession was imported but previously unused;
    // createOrReplaceTempView below is itself a 2.x API, so 2.x is already assumed).
    val spark: SparkSession = SparkSession.builder()
      .appName("InferringSchema")
      .master("local[*]")
      .getOrCreate()
    val sc: SparkContext = spark.sparkContext

    // Load the data: each input line is split on commas into an Array[String].
    val lineRDD = sc.textFile(args(0)).map(_.split(","))

    // Associate the RDD with the case class; the schema is inferred by reflection.
    // NOTE(review): assumes every line has >= 4 well-formed fields — malformed
    // input will fail the job with NumberFormatException/ArrayIndexOutOfBounds.
    val personRDD: RDD[Person] = lineRDD.map(x => Person(x(0).toInt, x(1), x(2).toInt, x(3).toInt))

    // Create the DataFrame (toDF comes from the session's implicits).
    import spark.implicits._
    val personDF: DataFrame = personRDD.toDF()

    // Register a temporary view so it can be queried with SQL.
    personDF.createOrReplaceTempView("t_person")

    // Query: the two oldest persons.
    val df: DataFrame = spark.sql("select * from t_person order by age desc limit 2")

    // Write the result as JSON.
    df.write.json(args(1))

    // Stopping the session also stops the underlying SparkContext.
    spark.stop()
  }
}
/**
 * Immutable record describing one person row: used to infer the DataFrame
 * schema by reflection.
 *
 * @param id      numeric identifier
 * @param name    person's name
 * @param age     age in years
 * @param faceVal "face value" score
 */
case class Person(id: Int, name: String, age: Int, faceVal: Int)