package com.bigdata.hpsk.spark.sql

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

object AvroSparkSQL {

  /**
   * Demo application: reads an Avro file into a DataFrame via the
   * databricks spark-avro connector, prints its schema, shows sample
   * rows, and runs a simple filter.
   *
   * The first program argument, when present, overrides the default
   * input path (`/datas/episodes.avro`), so the original no-arg
   * invocation behaves exactly as before.
   *
   * @param args optional: args(0) = path to the Avro input file
   */
  def main(args: Array[String]): Unit = {
    // Input path: allow override via CLI while keeping the original default.
    val inputPath = if (args.nonEmpty) args(0) else "/datas/episodes.avro"

    // Spark configuration: run locally with 2 worker threads.
    val sparkConf = new SparkConf()
      .setAppName("AvroSparkSQL Application")
      .setMaster("local[2]")

    // Reuse an already-running SparkContext if one exists.
    val sc = SparkContext.getOrCreate(sparkConf)

    // SQLContext is the pre-Spark-2.0 entry point for DataFrame operations.
    val sqlContext = SQLContext.getOrCreate(sc)

    // Brings the .avro extension method onto DataFrameReader.
    import com.databricks.spark.avro._

    val episodesDf = sqlContext.read.avro(inputPath)

    // Inspect the schema inferred from the Avro file.
    episodesDf.printSchema()

    // First two rows, without truncating long column values.
    episodesDf.show(2, truncate = false)

    // Example predicate pushdown on the "doctor" column.
    episodesDf.filter("doctor > 5").show()

    // Keep the JVM alive so the Spark Web UI (http://localhost:4040)
    // remains inspectable; remove or shorten for non-interactive runs.
    Thread.sleep(10000000)

    // Release Spark resources before exiting.
    sc.stop()
  }

}
