package com.txl.cn.spark05

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by txl on 2018/1/2.
  */
/**
  * Minimal Spark SQL example: parses an in-memory RDD of "name fv age"
  * lines into [[Person]] records, registers the resulting DataFrame as a
  * temporary view, and runs a SQL filter over it.
  *
  * Uses an explicit `main` instead of `extends App` to avoid the App
  * trait's initialization-order pitfalls.
  */
object SqlTest {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("sql")
    val sc = new SparkContext(conf)
    try {
      val sqlc = new SQLContext(sc)

      // Raw input: whitespace-separated "name fv age" triples.
      val data: RDD[String] =
        sc.parallelize(Array("laoduan 9999 30", "laozhao 99 32", "zs 98 22"))

      // Parse each line into a Person.
      // NOTE(review): assumes every line has exactly three well-formed
      // fields; a malformed line would throw at the .toInt calls.
      val res = data.map { t =>
        val str = t.split(" ")
        val name = str(0)
        val fv = str(1).toInt
        val age = str(2).toInt
        Person(name, fv, age)
      }

      import sqlc.implicits._
      val pDF: DataFrame = res.toDF
      pDF.createTempView("sparksql")

      // Query the temp view: keep rows with fv strictly greater than 98.
      val dataFrame = sqlc.sql("select * from sparksql where fv>98")
      dataFrame.show()
    } finally {
      // Always release the SparkContext, even if the job above fails.
      sc.stop()
    }
  }
}
/**
  * Immutable record backing one row of the "sparksql" view.
  *
  * @param name person's name (first whitespace-separated field)
  * @param fv   fan value, used as the SQL filter column
  * @param age  age in years
  */
final case class Person(name: String, fv: Int, age: Int) // case classes are already Serializable
