package com.niit.spark.sql

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Date: 2025/5/14
 * Author: Ys
 * Description:
 *  Demonstrates registering and using a user-defined function (UDF) in Spark SQL.
 */
object Spark_UDF {

  /**
   * Entry point: registers a Spark SQL UDF that prefixes each username with
   * "Name:" and runs a query against a temp view backed by a JSON file.
   *
   * Reads `input/user.json` (expected fields: `username`, `age`), registers
   * the view `user`, and prints the transformed result to stdout.
   */
  def main(args: Array[String]): Unit = {
    // Local session using all available cores; quiet the log output for the demo.
    val spark = SparkSession.builder().appName("Spark_UDF").master("local[*]").getOrCreate()
    spark.sparkContext.setLogLevel("ERROR")

    val df: DataFrame = spark.read.json("input/user.json")

    // Expose the DataFrame to Spark SQL under the view name "user".
    df.createOrReplaceTempView("user")

    // Register a UDF that prepends "Name:" to the queried username.
    spark.udf.register("newName", (name: String) => s"Name:$name")

    // Alias the UDF column so the output header reads "newName" rather than
    // the auto-generated "newName(username)".
    val resDF: DataFrame = spark.sql("select newName(username) as newName, age from user")

    resDF.show()
    spark.stop()
  }
}
