package jar

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.log4j.Level
import org.apache.log4j.Logger
/**
 * Local smoke-test driver for basic Spark RDD / Spark SQL operations.
 *
 * Runs with `master = local` against small JSON fixture files on the
 * developer's machine; intended for manual experimentation, not production.
 */
object SparkLocalTest {

  // Paths to the local JSON fixtures read by sparkSqlTest.
  val empFilePath = "/Users/mac/employee.json"
  val odrFilePath = "/Users/mac/order.json"

  def main(args: Array[String]): Unit = {
    // Suppress framework chatter BEFORE the context is created so that
    // startup logging is filtered too; only ERROR-level messages are shown.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.ERROR)

    // Immutable bindings: neither the config nor the context is reassigned.
    val sparkConfig = new SparkConf().setMaster("local").setAppName("sparkRddTest")
    val sc = new SparkContext(sparkConfig)

    try {
      //    val rdd = sc.textFile("/Users/mac/OLAP.md")
      //    rdd.flatMap(_.split(" ")).map(_.toLowerCase).take(10).foreach(println)

      sparkSqlTest(sc)
    } finally {
      // Always stop the context so the local JVM shuts down cleanly,
      // even if one of the DataFrame actions above throws.
      sc.stop()
    }
  }

  /**
   * Exercises basic DataFrame operations (show, select, filter, groupBy,
   * foreach, limit) against the employee and order JSON fixtures.
   *
   * @param sparkContext an active SparkContext used to build the SQLContext
   */
  def sparkSqlTest(sparkContext: SparkContext): Unit = {
    val sqlContext = new SQLContext(sparkContext)
    val empTable = sqlContext.read.json(empFilePath)
    val odrTable = sqlContext.read.json(odrFilePath)

    // Basic DataFrame operations.
    empTable.show

    empTable.printSchema

    empTable.select("name").show

    // Column arithmetic: projects name alongside age + 1.
    empTable.select(empTable("name"), empTable("age") + 1).show

    empTable.filter(empTable("age") > 26).select(empTable("name"), empTable("age")).show

    empTable.groupBy("age").count.show

    // Row access by positional index: 0 = id, 1 = name, 2 = age
    // (matches the select() projection order above).
    empTable.select(empTable("id"), empTable("name"), empTable("age")).foreach(
      x => {
        println("userid : " + x(0) + "    username : " + x(1) + "     userage : " + x(2))
      }
    )

    // limit: show first 4 rows without truncating wide column values.
    odrTable.limit(4).show(false)
  }

}
