package org.wj.sql

import org.apache.spark.sql.DataFrame
import org.wj.config.LocalSparkSession

/**
 * Demonstration of basic DataFrame operations: reading JSON, `show`,
 * `filter` with column expressions, `select`, and `groupBy`/`agg`.
 *
 * All statements run as side effects of object initialization (inherited
 * SparkSession `spark` comes from [[LocalSparkSession]]), so they execute
 * the first time this object is referenced.
 */
object CreateDataFrame extends LocalSparkSession {
  // Brings in $"col" syntax and Encoder implicits for the session.
  import spark.implicits._

  // NOTE(review): hard-coded absolute Windows path — this only runs on one
  // machine. Consider loading the resource from the classpath or a config
  // value instead; left as-is to preserve behavior.
  private val frame: DataFrame =
    spark.read.json("P:\\Project\\Idea\\bigdata\\spark\\src\\main\\resources\\data\\users.json")

  frame.show()

  // Fix: the original discarded the result of count(), which still triggers
  // a full Spark job but produces no observable output. Print it so the
  // action has a visible effect.
  println(s"row count: ${frame.count()}")

  // Females under 20.
  frame.filter($"sex" === "Female").filter($"age" < 20).show()

  // Selected columns for everyone older than 18.
  frame.select($"name", $"sex", $"age").filter($"age" > 18).show()

  // Per-sex aggregates: max age and count of names.
  frame.groupBy($"sex").agg(Map("age" -> "max", "name" -> "count")).show()
}
