package day11


import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SQLContext}

object SpecifyingSchema {

  /**
   * Spark job that demonstrates building a DataFrame with a programmatically
   * specified schema (`StructType`) instead of reflection/case classes.
   *
   * Reads comma-separated lines of `id,name,age,faceVal` from `args(0)`,
   * selects the two oldest people, and writes the result as JSON to `args(1)`.
   *
   * @param args args(0) = input text-file path, args(1) = output directory for JSON
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "Usage: SpecifyingSchema <inputPath> <outputPath>")

    // Boilerplate: local SparkContext and SQLContext.
    val conf: SparkConf = new SparkConf().setAppName("SpecifyingSchema").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)
    val sqlContext: SQLContext = new SQLContext(sc)

    try {
      // Load the raw data and split each comma-separated line into fields.
      val lineRDD = sc.textFile(args(0)).map(_.split(","))

      // Schema declared field-by-field via StructType; all columns nullable.
      // val, not var: the schema is never reassigned.
      val schema = StructType(List(
        StructField("id", IntegerType, nullable = true),
        StructField("name", StringType, nullable = true),
        StructField("age", IntegerType, nullable = true),
        StructField("faceVal", IntegerType, nullable = true)
      ))

      // Map each split line onto a Row matching the schema, then build the DataFrame.
      // NOTE(review): .toInt will throw on malformed numeric fields — assumes clean input.
      val rowRDD: RDD[Row] = lineRDD.map(x => Row(x(0).toInt, x(1), x(2).toInt, x(3).toInt))
      val personDF: DataFrame = sqlContext.createDataFrame(rowRDD, schema)

      // Register a temporary view so the data can be queried with SQL.
      personDF.createOrReplaceTempView("t_person")

      // Query: the two oldest people.
      val df: DataFrame = sqlContext.sql("select * from t_person order by age desc limit 2")

      // Write the result out as JSON.
      df.write.json(args(1))
    } finally {
      // Always release the SparkContext, even if a stage above throws.
      sc.stop()
    }
  }
}
