package com.darrenchan.spark.sql

import org.apache.spark.sql.SparkSession

/**
  * Basic DataFrame API operations demo.
  *
  * Loads a JSON file into a DataFrame and walks through common operations:
  * schema inspection, show, column selection (both SQL and DSL style),
  * column arithmetic, filtering, and group-by aggregation.
  *
  * Usage: DataFrameApp [path-to-json]
  * If no path is given, defaults to "people.json" in the working directory.
  */
object DataFrameApp {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[2]").appName("DataFrameApp").getOrCreate()

    // Input path: first CLI argument, falling back to the original default.
    val inputPath = args.headOption.getOrElse("people.json")

    // Load the JSON file into a DataFrame.
    // Equivalent form: spark.read.format("json").load(inputPath)
    val peopleDF = spark.read.json(inputPath)

    // Print the DataFrame's schema.
    peopleDF.printSchema()

    // Show the first 20 rows (a row count can be passed; default is 20).
    peopleDF.show()

    // Select specific columns: SELECT name, age FROM table
    // The two approaches below are equivalent.
    // Approach 1: register a temp view and use SQL.
    peopleDF.createOrReplaceTempView("people")
    spark.sql("select name, age from people").show()
    // Approach 2: DataFrame DSL.
    peopleDF.select("name", "age").show()

    // Column arithmetic with aliasing: SELECT name, age + 10 AS age2 FROM table
    peopleDF.select(peopleDF.col("name"), (peopleDF.col("age") + 10).as("age2")).show()

    // Row filtering: SELECT * FROM table WHERE age > 19
    peopleDF.filter(peopleDF.col("age") > 19).show()

    // Group by a column and aggregate: SELECT age, count(*) FROM table GROUP BY age
    peopleDF.groupBy("age").count().show()

    spark.stop()
  }
}
