import org.apache.spark.sql.{Column, ColumnName, SparkSession}
import org.junit.Test

class column对象 {
  // Local Spark session shared by every test in this class.
  val spark = SparkSession.builder()
    .master("local[6]")
    .appName("Test")
    .getOrCreate()

  import spark.implicits._
  import org.apache.spark.sql.functions._

  /**
    * Demonstrates the ways to create a [[Column]]:
    * four unbound forms (Symbol literal, `$` interpolator, `col`, `column`)
    * and two forms bound to a concrete Dataset.
    */
  @Test
  def createColTest(): Unit = {
    val ds = Seq(Person("zhangsan", 19), Person("lisi", 30), Person("zhangsan", 19)).toDS()

    // Unbound columns: not tied to any Dataset; resolved when used in a query.
    val column1: Symbol = 'name          // Symbol literal, lifted via spark.implicits._

    val column2: ColumnName = $"name"    // string interpolator from spark.implicits._

    val column3: Column = col("name")    // functions.col

    val column4: Column = column("name") // functions.column

    // Bound columns: attached to this specific Dataset.
    val column5 = ds.col("name")

    val column6 = ds.apply("name")
  }

  /**
    * Aliasing a column and converting its type.
    */
  @Test
  def as(): Unit = {
    val ds = Seq(Person("zhangsan", 19), Person("lisi", 30), Person("zhangsan", 19)).toDS()
    //select name,count(age) from table group by name
    // Give the column an alias.
    ds.select('name as "new_name").show()
    // Typed conversion: as[Long] yields a TypedColumn[_, Long].
    ds.select('age.as[Long]).show()
  }

  /**
    * Common Column operations: derived columns, LIKE matching,
    * descending sort, and set membership.
    */
  @Test
  def api(): Unit = {
    val ds = Seq(Person("zhangsan", 19), Person("lisi", 30), Person("zhangsan", 19)).toDS()
    // 1: add a derived column holding double the age
    ds.withColumn("double_age", 'age * 2).show()
    // 2: fuzzy match (SQL LIKE)
    // FIX: was `ds.where(...)show()` — dot-less infix call; use an explicit `.show()`.
    ds.where('name like "zhang%").show()
    // 3: sort by age, descending
    ds.sort('age.desc).show()
    // 4: membership test against an enumerated set of values
    ds.where('name.isin("zhangsan", "zhaoliu")).show()
  }
}
