package com.haozhen.sql

/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2021/1/31  1:01
  */

/** Immutable record used by the Dataset demos below (name, age in years, height in cm). */
final case class Person(name: String, age: Int, height: Int)

object RowDemo {

  /**
    * Demonstrates several ways to build Spark Datasets/DataFrames:
    * a ranged Dataset of Longs, a Dataset from a Seq of case classes,
    * and RDD-to-Dataset / RDD-to-DataFrame conversion via encoders.
    */
  def main(args: Array[String]): Unit = {
    import java.lang

    import org.apache.spark.rdd.RDD
    import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

    // `.init` drops the trailing '$' from the object's canonical class name.
    val spark: SparkSession = SparkSession.builder()
      .appName(this.getClass.getCanonicalName().init)
      .master("local[*]")
      .getOrCreate()

    // Bring in the encoders / $-interpolator and SQL functions.
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Close the session even if a demo step throws.
    try {
      // Dataset from a range: ids 1, 6, 11, ... (< 100, step 5).
      val ds1: Dataset[lang.Long] = spark.range(1, 100, 5)
      ds1.orderBy("id").show(5)

      // Dataset built from a Seq of case-class instances.
      val ds2: Dataset[Person] = spark.createDataset(Seq(
        Person("hz", 29, 170),
        Person("lc", 28, 170),
        Person("zx", 30, 175)
      ))
      ds2.printSchema()
      // Column-expression filter; equivalent to the SQL string form "age > 28".
      ds2.where($"age" > 28).show()

      // RDD -> Dataset / DataFrame through the Person case-class encoder.
      val arr2 = Array(("Jack", 28, 184), ("Tom", 10, 144), ("Andy", 16, 165))
      val rdd2: RDD[Person] = spark.sparkContext
        .makeRDD(arr2)
        .map(v => Person(v._1, v._2, v._3))

      val ds3: Dataset[Person] = rdd2.toDS()
      val df1: DataFrame = rdd2.toDF()
      df1.orderBy(desc("name")).show()
      ds3.printSchema()

      // Same conversion via the explicit createDataset API.
      val ds4: Dataset[Person] = spark.createDataset(rdd2)
      ds4.printSchema()
    } finally {
      spark.close()
    }
  }
}
