package com.doit.beans.day06

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: To learn big data, come to Duoyi Education (多易教育)
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description: Demo of Spark SQL DataFrame column projection (select /
 *               selectExpr), filtering (where) and aggregation (groupBy/agg).
 */
object Demo09_Select {

  /**
   * Demonstrates the different ways to project columns on a Spark DataFrame:
   * plain column names, Column objects, `functions.col`, symbol / `$`
   * interpolator syntax, `selectExpr`, plus `where` filtering and
   * `groupBy`/`agg` aggregation.
   *
   * Reads JSON event logs from `data/log` with this inferred schema:
   * {{{
   * root
   *  |-- app_version: string (nullable = true)
   *  |-- event_name: string (nullable = true)
   *  |-- guid: long (nullable = true)
   *  |-- ts: long (nullable = true)
   * }}}
   */
  def main(args: Array[String]): Unit = {

    val session = SparkSession.builder()
      .appName("test")
      .master("local[*]")
      .getOrCreate()

    val jsonDF = session.read
      .json("data/log")

    // 1. select by column name only — this overload accepts just names,
    //    no expressions.
    jsonDF
      .select("guid", "event_name", "ts").show()

    // 2. select with Column objects obtained from the DataFrame itself,
    //    which allows renaming via as(...).
    jsonDF
      .select(jsonDF("guid").as("uid"), jsonDF("event_name"), jsonDF("ts")).show()

    // 3. Column objects via functions.col — works without a DataFrame reference.
    import org.apache.spark.sql.functions._
    jsonDF
      .select(col("guid").as("id"), col("event_name"), col("ts")).show()

    // 4. implicit conversions from session.implicits: symbol syntax
    //    (NOTE: symbol literals 'x are deprecated since Scala 2.13) and the
    //    $"..." string interpolator.
    import session.implicits._
    jsonDF
      .select('guid.as("id2"), 'event_name, 'ts).show()
    jsonDF
      .select($"guid", $"event_name".as("eventName"), $"ts").show()

    // Function call inside a projection: select upper(event_name) as new_name
    jsonDF.select(upper($"event_name").as("new_name")).show()

    // selectExpr accepts SQL expression strings directly.
    jsonDF.selectExpr("1+1 as  num", "upper(event_name) as  new_name").show()

    // Filtering: SQL-string condition vs the Column API
    // (gt / ge / lt / le — and =!= for "not equal").
    jsonDF.where("guid  > 1").select("guid", "ts").show()
    jsonDF.where('guid.gt(1)).show()

    // Simple grouped count.
    jsonDF.groupBy("guid").count().show()

    // FIX: the original bound `val res` to the result of show(), which returns
    // Unit — the renamed DataFrame was discarded. Keep the DataFrame, then
    // display it.
    val res: DataFrame = jsonDF.groupBy("guid")
      .agg(("guid", "count"), "ts" -> "max")
      .toDF("uid", "cnt", "max_ts")
    res.show()

    // FIX: stop the session to release the local SparkContext's resources
    // (was missing in the original).
    session.stop()
  }

}
