package com.shujia.spark.sql

import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{DataFrame, SparkSession}

object Demo10UDF {

  /**
    * Demonstrates defining a user-defined function (UDF) and using it
    * both through the DataFrame DSL and through a registered name in Spark SQL.
    *
    * Reads `data/students.txt` as CSV with an explicit schema, then applies
    * the UDF to the `age` column in both styles.
    */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("udf")
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    /**
      * Define a custom function.
      */

    // A UserDefinedFunction value can be applied directly in the DSL.
    val ageAdd: UserDefinedFunction = udf((age: Int) => {
      age + 1
    })

    // Register the UDF under a name so it can also be referenced in SQL text.
    spark.udf.register("ageAdd", ageAdd)


    val studentDF: DataFrame = spark
      .read
      .format("csv")
      .option("sep", ",") // column separator
      // The schema must list the columns in the same order as they appear in the file.
      .schema("id STRING , name STRING, age INT , gender STRING , clazz STRING")
      .load("data/students.txt") // path of the input file

    studentDF.createOrReplaceTempView("student")

    studentDF
      // Use the custom function directly in the DSL.
      .select($"id", ageAdd($"age") as "age")
    //.show()


    // Alias the UDF result as `age` so the output column name matches
    // the DSL variant above instead of an auto-generated name.
    spark.sql(
      """
        |select id,ageAdd(age) as age from student
        |
      """.stripMargin)
      .show()

    // Release local Spark resources once the demo is done.
    spark.stop()
  }

}
