package com.scala.learn.sparksql1

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * @Copyright: Shanghai Definesys Company.All rights reserved.
  * @Description:
  * @author: chuhaitao
  * @since: 2019/3/9 14:57
  * @history:
  *          1.2019/3/9 created by chuhaitao
  */
object SqlDemo2 {

  /**
    * Demonstrates the pre-Spark-2.0 DataFrame workflow:
    * parse raw CSV strings into an `RDD[Row]`, attach an explicit schema
    * through `SQLContext.createDataFrame`, register a temp table and query
    * it with SQL.
    *
    * Fixes over the original: the `SparkContext` is now always stopped via
    * try/finally, the unused `sqlContext.implicits._` import was removed,
    * and the misspelled locals (`rowrRDD`, `schame`) were corrected.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("TopN").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // Each element is a CSV record: "id,name,age,fv"
      val lines = sc.parallelize(List("2,laozhuang,20,99", "3,xiaoming,18,199", "4,xiaohuang,20,200", "5,xiaoniu,18,99"))

      // Parse each line into a Row whose field order matches the schema below.
      val rowRDD: RDD[Row] = lines.map(_.split(","))
        .map(arr => Row(arr(0).toLong, arr(1), arr(2).toInt, arr(3).toInt))

      val sqlContext = new SQLContext(sc)

      // Schema definition: field name, type, nullable flag.
      val schema = StructType(
        List(
          StructField("id", LongType, nullable = true),
          StructField("name", StringType, nullable = true),
          StructField("age", IntegerType, nullable = true),
          StructField("fv", IntegerType, nullable = true)
        )
      )

      // Build the DataFrame from the Row RDD plus the explicit schema.
      val pdf = sqlContext.createDataFrame(rowRDD, schema)
      // Register under a table name so it is addressable from SQL.
      pdf.registerTempTable("user")

      // Query via the SQLContext: highest fv first, ties broken by age ascending.
      val result = sqlContext.sql("select * from user order by fv desc ,age asc ")

      result.show()
    } finally {
      // Release Spark resources even if the job above throws.
      sc.stop()
    }
  }
}
