package com.txl.cn.spark05

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by txl on 2018/1/2.
  *
  * Demo: build a DataFrame from an RDD[Row] plus an explicit schema, then
  * select and order columns with the DataFrame API.
  *
  * NOTE(review): uses the legacy SQLContext API; in Spark 2.x+ the same code
  * would normally go through SparkSession — kept as-is to match file imports.
  */
object SqlTest2 {

  // Explicit main instead of `extends App`: the App trait's delayed
  // initialization can interact badly with Spark driver startup and makes
  // field-initialization order surprising.
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("sqlSpark")
    val sc = new SparkContext(conf)
    try {
      val sqlc = new SQLContext(sc)

      // Raw input lines: "name fv age", single-space separated.
      val data: RDD[String] = sc.parallelize(Array("laoduan 9999 30", "laozhao 99 32", "zs 98 22"))

      // Parse each line into Row(name: String, fv: Int, age: Int).
      // Assumes every record has exactly 3 well-formed fields (true for the
      // hard-coded input above; real input would need validation).
      val rows = data.map { line =>
        val fields = line.split(" ")
        Row(fields(0), fields(1).toInt, fields(2).toInt)
      }

      // Schema matching the Row layout above. All fields marked nullable
      // explicitly for consistency (the original mixed an explicit `true`
      // with defaulted nullability).
      val personSchema = StructType(
        List(
          StructField("name", StringType, nullable = true),
          StructField("fv", IntegerType, nullable = true),
          StructField("age", IntegerType, nullable = true)
        )
      )

      val personDF = sqlc.createDataFrame(rows, personSchema)

      /* SQL-string alternative, kept for reference:
      personDF.createTempView("spark")
      val dataFrame2 = sqlc.sql("select * from spark where fv>99")
      dataFrame2.show()
      */

      val df3 = personDF.select("name", "fv", "age")
      // val value = df3.where("age > 22")

      // Implicit conversions enabling the $"col" column syntax.
      import sqlc.implicits._
      // Was `$"age"desc`: deprecated postfix operator notation (requires
      // scala.language.postfixOps). Use the explicit method call.
      val value2 = df3.orderBy($"age".desc)
      value2.show()
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }
}

