package spark.example

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Minimal Spark SQL example: reads a JSON file from HDFS into a DataFrame
 * and prints its contents and schema.
 *
 * NOTE(review): object name should be UpperCamelCase (`SparkSqlExample`),
 * but it is kept as-is because the class name is the spark-submit entry point.
 */
object sparkSqlExample {
  def main(args: Array[String]): Unit = {
    // Local session for demonstration; `spark.testing.memory` lowers the
    // minimum memory check so the example runs in constrained environments.
    val spark = SparkSession.builder()
      .appName("Spark SQL Example")
      .master("local")
      .config("spark.testing.memory", "471859200")
      .getOrCreate()
    try {
      val input = "hdfs://master:9000/spark/people.json"
      //createDataFrames(spark,input)
      //dataFrameOperations(spark,input)
      val df = spark.read.format("json").load(input)
      df.show()
      df.printSchema()
    } finally {
      // Always release the session, even if the read/show fails.
      spark.stop()
    }
  }

  /**
   * Loads a JSON file into a DataFrame, shows it, and returns it so callers
   * (e.g. the commented-out `dataFrameOperations` below) can keep working
   * with the data. Previously the DataFrame was discarded (returned Unit).
   *
   * @param spark    active SparkSession used to read the file
   * @param filePath path (e.g. HDFS URI) of the JSON input
   * @return the loaded DataFrame
   */
  def createDataFrames(spark: SparkSession, filePath: String): DataFrame = {
    //spark.read.json(filePath)
    val df = spark.read.format("json").load(filePath)
    df.show()
    df
  }

 // def dataFrameOperations(spark: SparkSession,filePath: String) = {
 //   import spark.implicits._
 //   val df = createDataFrames(spark,filePath)

 //   println("df content")
 //   df.show()

 //   println("df schema: ")
 //   df.printSchema()

 //   println("select only the name column")
 //   df.select($"name").show()

 //   println("select everbody,but increment the age by 1")
 //   df.select($"name",$"age" + 1).show()

 //   println("select people older than 21")
 //   df.filter($"age" > 21).show()

 //   println("count people by age")
 //   df.groupBy("age").count().show()
 // }
}
