package sparkSql

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

object HelloSparkSql {

  /** Simple person record used to demonstrate RDD-to-DataFrame conversion. */
  case class personObj(name: String, age: Int, height: Int)

  /**
   * Minimal Spark SQL example: builds a DataFrame from a local collection,
   * registers it as a temporary table, and runs a SQL query against it.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[*]").setAppName("sss")
    val sc = new SparkContext(conf)
    val sqlCon = new SQLContext(sc)
    sc.setLogLevel("error")

    val personList = List(
      personObj("xiaowang", 18, 180),
      personObj("xz", 20, 175),
      personObj("gcx", 100, 150)
    )
    val personRDD = sc.parallelize(personList)

    // Needed for the implicit RDD -> DataFrame conversion used by .toDF below.
    import sqlCon.implicits._
    val personDf = personRDD.toDF
    personDf.show()

    // BUG FIX: the table was registered as "persoon" while the query below
    // selects from "person", so the query failed at runtime with
    // "table not found". Register under the name the query actually uses.
    personDf.registerTempTable("person")
    // Also fixed the malformed SQL "select *from" -> "select * from".
    sqlCon.sql("select * from person where age >= 20").show()

    /*
    personDf.write.save("")  // default output format is parquet (binary, not human-readable)
    personDf.write.json("")  // json output is plain text
    */

    // Release the local cluster's resources before the JVM exits.
    sc.stop()
  }

}
