package com.imooc.spark

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

/**
  * Created by zghgchao 2017/12/23 10:18
  * DataFrame API基本操作
  */
/**
  * Demonstrates basic DataFrame API operations: loading JSON, inspecting the
  * schema, selecting/deriving columns, filtering, and grouped aggregation.
  */
object DataFrameApp {

  /**
    * Entry point: builds a local SparkSession, runs a series of DataFrame
    * operations against `src/data/people.json`, then stops the session.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    // Side-effecting 0-arity method: call with parentheses per Scala convention.
    SetLogger()

    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("DataFrameApp").getOrCreate()

    // Load a JSON file into a DataFrame.
    //    spark.read.format("json").load("src/data/people.json")
    val peopleDF = spark.read.json("src/data/people.json")

    // Print the DataFrame's schema.
    peopleDF.printSchema()
    // show() prints the first 20 rows by default.
    peopleDF.show()

    // Query a single column: select name from table
    peopleDF.select("name").show()

    // Select columns and compute a derived one: select name, age+10 as age2 from table
    peopleDF.select(peopleDF.col("name"), (peopleDF.col("age") + 10).as("age2")).show()

    // Filter rows by a column value: select * from table where age > 19
    peopleDF.filter(peopleDF.col("age") > 19).show()

    // Group by a column, then aggregate: select age, count(1) from table group by age
    peopleDF.groupBy("age").count().show()
    spark.stop()
  }

  /**
    * Silences log4j output and Spark's console progress bar so the demo's
    * show() output is readable. Must run before the SparkSession is created
    * for the progress-bar property to take effect.
    */
  def SetLogger(): Unit = {
    Logger.getLogger("org").setLevel(Level.OFF)
    Logger.getLogger("com").setLevel(Level.OFF)
    System.setProperty("spark.ui.showConsoleProgress", "false")
    Logger.getRootLogger().setLevel(Level.OFF)
  }
}
