package com.shujia.spark.sql

import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demonstrates defining a Spark SQL UDF and using it both from the
 * DataFrame DSL and from a registered SQL function.
 *
 * Reads student records from a CSV file, extracts a class-prefix "flag"
 * (first two characters of the `clazz` column) via a UDF, and counts
 * students per flag.
 */
object Demo13UDF {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("udf")
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // CSV has no header; supply the schema explicitly.
    val studentDF: DataFrame = spark.read
      .format("csv")
      .option("sep", ",")
      .schema("id STRING ,name STRING ,age INT ,gender STRING ,clazz STRING")
      .load("data/students.txt")

    /**
     * Custom function: take the first two characters of the class name.
     *
     * Guard against null and short values: Spark passes null through for
     * null columns, and a malformed row may yield a clazz shorter than 2
     * characters — the unguarded substring(0, 2) would throw inside the
     * executor task and fail the whole job.
     */
    val subStr: UserDefinedFunction = udf((clazz: String) => {
      if (clazz == null) null
      else clazz.substring(0, math.min(2, clazz.length))
    })

    // Use the custom function in the DataFrame DSL.
    studentDF
      .select(subStr($"clazz") as "flag")
      .groupBy($"flag")
      .agg(count($"flag") as "count")
      .show()

    // Use the custom function in SQL: register it by name first.
    spark.udf.register("subStr", subStr)
    studentDF.createOrReplaceTempView("student")
    spark.sql(
      """
        |select subStr(clazz) as flag, count(1) as count from
        |student
        |group by subStr(clazz)
        |
        |""".stripMargin)
      .show()

    // Release the local Spark resources.
    spark.stop()
  }

}
