package com.study.spark.scala.dataset

import org.apache.spark.sql.SparkSession

/**
 * Demonstrates usage of filter, where, and when in Spark SQL.
 *
 * @author stephen
 * @date 2019-09-27 15:52
 */
object FilterWhereWhenDemo {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("FilterWhereWhenDemo")
      .master("local[*]")
      .getOrCreate()

    // Implicit conversions (enables the $"col" syntax) and built-in SQL functions.
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Sample DataFrame with columns: id, name, age.
    val df = spark.createDataset(Seq((3, "zhangsan", 20), (4, "lisi", 30), (5, "wangwu", 40)))
      .toDF("id", "name", "age")

    // Several equivalent ways to write filter.
    df.filter($"age" > 30).show()
    // === is defined on Column; the corresponding "not equal" operator is =!=
    df.filter($"age" === 30).show()
    // filter also accepts a SQL-expression string.
    df.filter("name='zhangsan'").show()
    df.filter("age=30").show()
    // Columns can be compared against arbitrary column expressions.
    df.filter($"age" === ($"id" * 10 - 10)).show()


    // Several ways to write where; where simply delegates to filter.
    df.where($"age" =!= 20).show()
    df.where("age != 20").show()
    df.where(col("age") > ($"id" * 10)).show()

    // when: conditional expression, evaluated top-down with otherwise as fallback.
    // NOTE: fixed column reference — the original used $"ageZL", which does not
    // exist in this DataFrame and would throw an AnalysisException at runtime.
    df.select($"name",
      when(isnull($"name"), rand()).when($"age" < 3, 5).otherwise(10).as("new_key1")
    ).show()

    spark.stop()
  }
}
