package com.sql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

object SparkSQL {

  /**
   * Entry point: loads a local JSON file into a DataFrame, registers it as a
   * temporary table, runs a SQL filter (`age > 18`), and prints the first
   * column of each matching row.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("sql_test")
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)

      // Path is relative to the working directory — TODO confirm at deploy time.
      val dataFrame: DataFrame = sqlContext.read.format("json").load("./json.json")

      dataFrame.show()
      dataFrame.printSchema()

      /**
       * DataFrame DSL alternative (similar to Hibernate's JPA-style queries);
       * columns come back in the order listed in select().
       */
      // val result = dataFrame.select(dataFrame.col("name"), dataFrame.col("age"))
      //   .where(dataFrame.col("age").gt(18)).show()

      // Register a temporary table. A temp table stores no data of its own —
      // neither in memory nor on disk — it is just a named reference (pointer)
      // to the DataFrame. (Deprecated in Spark 2.x in favor of
      // createOrReplaceTempView; kept here to match this file's SQLContext API.)
      dataFrame.registerTempTable("temp")
      val result = sqlContext.sql("select * from temp where age>18")
      val rdd: RDD[Row] = result.rdd
      rdd.foreach(r => println(r.get(0)))
    } finally {
      // Always release the SparkContext, even if the job above throws,
      // so the local Spark runtime shuts down cleanly.
      sc.stop()
    }
  }

}
